##################################################################
##  (c) Copyright 2015-  by Jaron T. Krogel                     ##
##################################################################


#====================================================================#
#  qmcpack_quantity_analyzers.py                                     #
#    Analyzer classes for specific quantities generated by QMCPACK.  #
#    Quantities include scalar values from scalars.dat, dmc.dat,     #
#    or stat.h5 and general quantities from stat.h5 such as the      #
#    energy density, 1-body density matrices, total densities,       #
#    spin densities, and static structure factors.  Also supports    #
#    basic analysis of Traces data (multiple traces.h5 files).       #
#                                                                    #
#  Content summary:                                                  #
#    QuantityAnalyzer                                                #
#      Base class for specific quantity analyzers.                   #
#                                                                    #
#    DatAnalyzer                                                     #
#      Base class containing common characteristics of *.dat file    #
#      analysis.                                                     #
#                                                                    #
#    ScalarsDatAnalyzer                                              #
#      Supports analysis specific to scalars.dat.                    #
#                                                                    #
#    DmcDatAnalyzer                                                  #
#      Supports analysis specific to dmc.dat.                        #
#                                                                    #
#    HDFAnalyzer                                                     #
#      Base class for analyzers of stat.h5 data.                     #
#                                                                    #
#    ScalarsHDFAnalyzer                                              #
#      Supports analysis specific to scalar values in stat.h5.       #
#                                                                    #
#    EnergyDensityAnalyzer                                           #
#      Supports analysis of energy density data from stat.h5.        #
#                                                                    #
#    DensityMatricesAnalyzer                                         #
#      Supports analysis of 1-body particle or energy density        #
#      matrices from stat.h5.                                        #
#                                                                    #
#    DensityAnalyzer                                                 #
#      Supports analysis of total densities from stat.h5.            #
#                                                                    #
#    SpinDensityAnalyzer                                             #
#      Supports analysis of spin-resolved densities from stat.h5.    #
#                                                                    #
#    StructureFactorAnalyzer                                         #
#      Supports analysis of spin-resolved static structure factors   #
#      from stat.h5.                                                 #
#                                                                    #
#    TracesFileHDF                                                   #
#      Represents an HDF file containing traces data.                #
#      One traces.h5 file is produced per MPI process.               #
#                                                                    #
#    TracesAnalyzer                                                  #
#      Supports basic analysis of Traces data.                       #
#      Can read multiple traces.h5 files and validate against        #
#      data contained in scalars.dat and dmc.dat.                    #
#                                                                    #
#    SpaceGrid                                                       #
#      Specifically for energy density analysis.                     #
#      Represents a grid of data in 3-dimensional space.             #
#      Can represent rectilinear grids in Cartesian, cylindrical,    #
#      or spherical coordinates as well as Voronoi grids.            #
#                                                                    #
#====================================================================#

import os
import re
from numpy import array,zeros,dot,loadtxt,ceil,floor,empty,sqrt,trace,savetxt,concatenate,real,imag,diag,arange,ones,identity
try:
    from scipy.linalg import eig,LinAlgError
except Exception:
    from numpy.linalg import eig,LinAlgError
#end try
from numerics import ndgrid,simstats,simplestats,equilibration_length
from generic import obj
from hdfreader import HDFreader
from qmcpack_analyzer_base import QAobject,QAanalyzer,QAdata,QAHDFdata
from fileio import XsfFile
from debug import *

class QuantityAnalyzer(QAanalyzer):
    def __init__(self,nindent=0):
        QAanalyzer.__init__(self,nindent=nindent)
        self.method_info = QAanalyzer.method_info
    #end def __init__

    def plot_trace(self,quantity,*args,**kwargs):
        from matplotlib.pyplot import plot,xlabel,ylabel,title,ylim
        if 'data' in self:
            if not quantity in self.data:
                self.error('quantity '+quantity+' is not present in the data')
            #end if
            nbe = self.get_nblocks_exclude()
            q = self.data[quantity]
            middle = int(len(q)/2)
            qmean = q[middle:].mean()
            qmax  = q[middle:].max()
            qmin  = q[middle:].min()
            ylims = [qmean-2*(qmean-qmin),qmean+2*(qmax-qmean)]
            smean,svar = self[quantity].tuple('mean','sample_variance')
            sstd = sqrt(svar)
            plot(q,*args,**kwargs)
            plot([nbe,nbe],ylims,'k-.',lw=2)
            plot([0,len(q)],[smean,smean],'r-')
            plot([0,len(q)],[smean+sstd,smean+sstd],'r-.')
            plot([0,len(q)],[smean-sstd,smean-sstd],'r-.')
            ylim(ylims)
            ylabel(quantity)
            xlabel('samples')
            title('Trace of '+quantity)
        #end if
    #end def plot_trace

    def init_sub_analyzers(self):
        None
    #end def init_sub_analyzers

    def get_nblocks_exclude(self):
        return self.info.nblocks_exclude
    #end def get_nblocks_exclude
#end class QuantityAnalyzer

class DatAnalyzer(QuantityAnalyzer):
    def __init__(self,filepath=None,equilibration=None,nindent=0):
        QuantityAnalyzer.__init__(self,nindent=nindent)
        self.info.filepath = filepath
        nbe = self.method_info.nblocks_exclude
        if equilibration is not None and nbe==-1:
            self.load_data()
            nbe = equilibration_length(self.data[equilibration])
            assert nbe>=0, 'Number of equilibration blocks is negative.'
            self.method_info.nblocks_exclude = nbe
        #end if
    #end def __init__

    def analyze_local(self):
        self.not_implemented()
    #end def analyze_local
#end class DatAnalyzer

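# Usage sketch for DatAnalyzer subclasses such as ScalarsDatAnalyzer below
# (hypothetical file name; assumes the surrounding QAanalyzer machinery has
# left method_info.nblocks_exclude at -1 so that the equilibration length is
# auto-detected from the named column via equilibration_length):
#
#   sda = ScalarsDatAnalyzer(filepath='qmc.s000.scalar.dat',
#                            equilibration='LocalEnergy')
#   sda.load_data_local()
#   sda.analyze_local()
#   print(sda.LocalEnergy.mean, sda.LocalEnergy.error)
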
class ScalarsDatAnalyzer(DatAnalyzer):
    def load_data_local(self):
        filepath = self.info.filepath
        quantities = QAanalyzer.request.quantities

        lt = loadtxt(filepath)
        if len(lt.shape)==1:
            lt.shape = (1,len(lt))
        #end if

        data = lt[:,1:].transpose()

        fobj = open(filepath,'r')
        variables = fobj.readline().split()[2:]
        fobj.close()

        self.data = QAdata()
        for i in range(len(variables)):
            var  = variables[i]
            cvar = self.condense_name(var)
            if cvar in quantities:
                self.data[var]=data[i,:]
            #end if
        #end for
    #end def load_data_local

    def analyze_local(self):
        nbe = QAanalyzer.method_info.nblocks_exclude
        self.info.nblocks_exclude = nbe
        data = self.data
        for varname,samples in data.items():
            (mean,var,error,kappa)=simstats(samples[nbe:])
            self[varname] = obj(
                mean            = mean,
                sample_variance = var,
                error           = error,
                kappa           = kappa
                )
        #end for

        if 'LocalEnergy_sq' in data:
            v = data.LocalEnergy_sq - data.LocalEnergy**2
            (mean,var,error,kappa)=simstats(v[nbe:])
            self.LocalEnergyVariance = obj(
                mean            = mean,
                sample_variance = var,
                error           = error,
                kappa           = kappa
                )
        #end if
    #end def analyze_local
#end class ScalarsDatAnalyzer

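# Note on the variance estimate above: scalars.dat carries per-block values
# of both E = LocalEnergy and E^2 = LocalEnergy_sq, so the local-energy
# variance is formed blockwise as
#
#   Var(E) = <E^2> - <E>^2
#
# and then run through simstats like any other scalar trace.
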
class DmcDatAnalyzer(DatAnalyzer):
    def load_data_local(self):
        filepath = self.info.filepath

        lt = loadtxt(filepath)
        if len(lt.shape)==1:
            lt.shape = (1,len(lt))
        #end if

        data = lt[:,1:].transpose()

        fobj = open(filepath,'r')
        variables = fobj.readline().split()[2:]
        fobj.close()

        self.data = QAdata()
        for i in range(len(variables)):
            var = variables[i]
            self.data[var]=data[i,:]
        #end for
    #end def load_data_local

    def analyze_local(self):
        nbe = QAanalyzer.method_info.nblocks_exclude
        self.info.nblocks_exclude = nbe
        data = self.data

        input       = self.run_info.input
        series      = self.method_info.series
        ndmc_blocks = self.run_info.request.ndmc_blocks

        #qmc = input.simulation.calculations[series]
        qmc    = input.get_qmc(series)
        blocks = qmc.blocks
        steps  = qmc.steps
        nse    = nbe*steps

        self.info.nsteps_exclude = nse

        nsteps = len(data.list()[0])-nse

        #nsteps = blocks*steps-nse
        block_avg = nsteps > 2*ndmc_blocks
        if block_avg:
            block_size  = int(floor(float(nsteps)/ndmc_blocks))
            ndmc_blocks = int(floor(float(nsteps)/block_size))
            nse    += nsteps-ndmc_blocks*block_size
            nsteps  = ndmc_blocks*block_size
        #end if

        for varname,samples in data.items():
            samp = samples[nse:]
            if block_avg:
                samp.shape = ndmc_blocks,block_size
                samp = samp.mean(axis=1)
            #end if
            (mean,var,error,kappa)=simstats(samp)
            self[varname] = obj(
                mean            = mean,
                sample_variance = var,
                error           = error,
                kappa           = kappa
                )
        #end for
    #end def analyze_local

    def get_nblocks_exclude(self):
        return self.info.nsteps_exclude
    #end def get_nblocks_exclude
#end class DmcDatAnalyzer

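# Reblocking sketch for the step-resolved dmc.dat data above: with, say,
# nsteps = 10000 post-equilibration steps and ndmc_blocks = 1000 requested
# blocks,
#
#   block_size  = floor(10000/1000) = 10
#   ndmc_blocks = floor(10000/10)   = 1000
#
# and any leftover steps that do not fill a whole block are pushed into the
# excluded region (nse), so that samp reshapes cleanly to
# (ndmc_blocks, block_size) before averaging within each block.
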
class HDFAnalyzer(QuantityAnalyzer):
    def __init__(self,nindent=0):
        QuantityAnalyzer.__init__(self,nindent=nindent)
        self.info.should_remove = False
    #end def __init__
#end class HDFAnalyzer

class ScalarsHDFAnalyzer(HDFAnalyzer):
    corrections = obj(
        mpc = obj(ElecElec=-1,MPC=1),
        kc  = obj(KEcorr=1)
        )

    def __init__(self,exclude,nindent=0):
        HDFAnalyzer.__init__(self,nindent=nindent)
        self.info.exclude = exclude
    #end def __init__

    def load_data_local(self,data=None):
        if data is None:
            self.error('attempted load without data')
        #end if
        exclude = self.info.exclude
        self.data = QAHDFdata()
        for var in list(data.keys()):
            if not var in exclude and not str(var)[0]=='_' and not 'skall' in var.lower():
                self.data[var] = data[var]
                del data[var]
            #end if
        #end for
        corrvars = ['LocalEnergy','ElecElec','MPC','KEcorr']
        if set(corrvars)<=set(self.data.keys()):
            Ed,Ved,Vmd,Kcd = self.data.tuple(*corrvars)
            E_mpc_kc = obj()
            E  = Ed.value
            Ve = Ved.value
            Vm = Vmd.value
            Kc = Kcd.value
            E_mpc_kc.value = E-Ve+Vm+Kc
            if 'value_squared' in Ed:
                E2  = Ed.value_squared
                Ve2 = Ved.value_squared
                Vm2 = Vmd.value_squared
                Kc2 = Kcd.value_squared
                E_mpc_kc.value_squared = E2+Ve2+Vm2+Kc2 + 2*(E*(-Ve+Vm+Kc)-Ve*(Vm+Kc)+Vm*Kc)
            #end if
            self.data.LocalEnergy_mpc_kc = E_mpc_kc
        #end if
    #end def load_data_local

    def analyze_local(self):
        nbe = QAanalyzer.method_info.nblocks_exclude
        self.info.nblocks_exclude = nbe
        for varname,val in self.data.items():
            (mean,var,error,kappa)=simstats(val.value[nbe:,...].ravel())
            if 'value_squared' in val:
                variance = val.value_squared[nbe:,...].mean()-mean**2
            else:
                variance = var
            #end if
            self[varname] = obj(
                mean            = mean,
                variance        = variance,
                sample_variance = var,
                error           = error,
                kappa           = kappa
                )
        #end for
        self.correct('mpc','kc')
    #end def analyze_local

    def correct(self,*corrections):
        corrkey=''
        for corr in corrections:
            corrkey+=corr+'_'
        #end for
        corrkey=corrkey[:-1]
        if not set(corrections)<=set(self.corrections.keys()):
            self.warn('correction '+corrkey+' is unknown and cannot be applied')
            return
        #end if
        if not 'data' in self:
            self.warn('correction '+corrkey+' cannot be applied because data is not present')
            return
        #end if
        varname = 'LocalEnergy_'+corrkey
        if varname in self and varname in self.data:
            return
        #end if
        corrvars = ['LocalEnergy']
        signs    = [1]
        for corr in corrections:
            for var,sign in self.corrections[corr].items():
                corrvars.append(var)
                signs.append(sign)
            #end for
        #end for
        missing = list(set(corrvars)-set(self.data.keys()))
        if len(missing)>0:
            #self.warn('correction '+corrkey+' cannot be applied because '+str(missing)+' are missing')
            return
        #end if

        le = self.data.LocalEnergy
        E,E2 = 0*le.value,0*le.value_squared
        n = len(corrvars)
        for i in range(n):
            ed   = self.data[corrvars[i]]
            e,e2 = ed.value,ed.value_squared
            s    = signs[i]
            E  += s*e
            E2 += e2
            for j in range(i+1,n):
                eo = self.data[corrvars[j]].value
                so = signs[j]
                E2 += 2*s*e*so*eo
            #end for
        #end for
        val = obj(value=E,value_squared=E2)
        self.data[varname] = val
        nbe = self.info.nblocks_exclude
        (mean,var,error,kappa)=simstats(val.value[nbe:,...].ravel())
        self[varname] = obj(
            mean            = mean,
            variance        = val.value_squared[nbe:,...].mean()-mean**2,
            sample_variance = var,
            error           = error,
            kappa           = kappa
            )
    #end def correct
#end class ScalarsHDFAnalyzer

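# The corrected energy assembled above is the finite-size corrected
# combination
#
#   E_mpc_kc = E - V_ee + V_MPC + KE_corr
#
# i.e. the bare Ewald electron-electron energy is swapped for the model
# periodic Coulomb (MPC) value and the kinetic-energy correction is added.
# The signs come directly from the class-level 'corrections' table.
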
class EnergyDensityAnalyzer(HDFAnalyzer):
    def __init__(self,name,nindent=0):
        HDFAnalyzer.__init__(self,nindent=nindent)
        self.info.set(
            name      = name,
            reordered = False
            )
    #end def __init__

    def load_data_local(self,data=None):
        if data is None:
            self.error('attempted load without data')
        #end if
        name = self.info.name
        self.data = QAHDFdata()
        if name in data:
            hdfg = data[name]
            hdfg._remove_hidden(deep=False)
            self.data.transfer_from(hdfg)
            del data[name]
        else:
            self.info.should_remove = True
        #end if
    #end def load_data_local

    def analyze_local(self):
        nbe = QAanalyzer.method_info.nblocks_exclude
        self.info.nblocks_exclude = nbe
        data = self.data

        #why is this called 3 times?

        #transfer hdf data
        sg_pattern = re.compile(r'spacegrid\d*')
        nspacegrids=0
        # add simple data first
        for k,v in data.items():
            if not sg_pattern.match(k):
                self[k] = v
            else:
                nspacegrids+=1
            #end if
        #end for
        # add spacegrids second
        opts = QAobject()
        opts.points = self.reference_points
        opts.nblocks_exclude = nbe
        self.spacegrids=[]
        if nspacegrids==0:
            self.spacegrids.append(SpaceGrid(data.spacegrid,opts))
        else:
            for ig in range(nspacegrids):
                sg=SpaceGrid(data['spacegrid'+str(ig+1)],opts)
                self.spacegrids.append(sg)
            #end for
        #end if

        #reorder atomic data to match input file for Voronoi grids
        if self.run_info.type=='bundled':
            self.info.reordered=True
        #end if
        if not self.info.reordered:
            self.reorder_atomic_data()
        #end if

        #convert quantities outside all spacegrids
        outside = QAobject()
        iD,iT,iV = tuple(range(3))
        outside.D = QAobject()
        outside.T = QAobject()
        outside.V = QAobject()
        outside.E = QAobject()
        outside.P = QAobject()

        value = self.outside.value.transpose()[...,nbe:]

        #mean,error = simplestats(value)
        mean,var,error,kappa = simstats(value)
        outside.D.mean  = mean[iD]
        outside.D.error = error[iD]
        outside.T.mean  = mean[iT]
        outside.T.error = error[iT]
        outside.V.mean  = mean[iV]
        outside.V.error = error[iV]

        E = value[iT,:]+value[iV,:]
        #mean,error = simplestats(E)
        mean,var,error,kappa = simstats(E)
        outside.E.mean  = mean
        outside.E.error = error

        P = 2./3.*value[iT,:]+1./3.*value[iV,:]
        #mean,error = simplestats(P)
        mean,var,error,kappa = simstats(P)
        outside.P.mean  = mean
        outside.P.error = error

        self.outside = outside

        self.outside.data = obj(
            D = value[iD,:],
            T = value[iT,:],
            V = value[iV,:],
            E = E,
            P = P
            )

        # convert ion point data, if present
        if 'ions' in self:
            ions = QAobject()
            ions.D = QAobject()
            ions.T = QAobject()
            ions.V = QAobject()
            ions.E = QAobject()
            ions.P = QAobject()

            value = self.ions.value.transpose()[...,nbe:]

            mean,var,error,kappa = simstats(value)
            ions.D.mean  = mean[iD]
            ions.D.error = error[iD]
            ions.T.mean  = mean[iT]
            ions.T.error = error[iT]
            ions.V.mean  = mean[iV]
            ions.V.error = error[iV]

            E = value[iT,:]+value[iV,:]
            mean,var,error,kappa = simstats(E)
            ions.E.mean  = mean
            ions.E.error = error

            P = 2./3.*value[iT,:]+1./3.*value[iV,:]
            mean,var,error,kappa = simstats(P)
            ions.P.mean  = mean
            ions.P.error = error

            ions.data = obj(
                D = value[iD,:],
                T = value[iT,:],
                V = value[iV,:],
                E = E,
                P = P
                )

            self.ions = ions
        #end if

        return
    #end def analyze_local

    def reorder_atomic_data(self):
        input = self.run_info.input
        xml   = self.run_info.ordered_input
        ps = input.get('particlesets')
        if 'ion0' in ps and len(ps.ion0.groups)>1 and 'size' in ps.ion0:
            qsx = xml.simulation.qmcsystem
            if len(ps)==1:
                psx = qsx.particleset
            else:
                psx=None
                for pst in qsx.particleset:
                    if pst.name=='ion0':
                        psx=pst
                    #end if
                #end for
                if psx is None:
                    self.error('ion0 particleset not found in qmcpack xml file for atomic reordering of Voronoi energy density')
                #end if
            #end if

            #ordered ion names
            # xml groups are ordered the same as in qmcpack's input file
            ion_names = []
            for gx in psx.group:
                ion_names.append(gx.name)
            #end for

            #create the mapping to restore proper ordering
            nions = ps.ion0.size
            ions  = ps.ion0.ionid
            imap=empty((nions,),dtype=int)
            icurr = 0
            for ion_name in ion_names:
                for i in range(len(ions)):
                    if ions[i]==ion_name:
                        imap[i]=icurr
                        icurr+=1
                    #end if
                #end for
            #end for

            #reorder the atomic data
            for sg in self.spacegrids:
                sg.reorder_atomic_data(imap)
            #end for
        #end if
        self.info.reordered=True
        return
    #end def reorder_atomic_data

    def remove_data(self):
        QAanalyzer.remove_data(self)
        if 'spacegrids' in self:
            for sg in self.spacegrids:
                if 'data' in sg:
                    del sg.data
                #end if
            #end for
        #end if
        if 'outside' in self and 'data' in self.outside:
            del self.outside.data
        #end if
    #end def remove_data

    #def prev_init(self):
    #    if data._contains_group("spacegrid1"):
    #        self.points = data.spacegrid1.domain_centers
    #        self.axinv  = data.spacegrid1.axinv
    #        val = data.spacegrid1.value
    #        npoints,ndim = self.points.shape
    #        self.E = zeros((npoints,))
    #        print 'p shape ',self.points.shape
    #        print 'v shape ',val.shape
    #        nblocks,nvpoints = val.shape
    #        for b in range(nblocks):
    #            for i in range(npoints):
    #                ind = 6*i
    #                self.E[i] += val[b,ind+1] + val[b,ind+2]
    #            #end for
    #        #end for
    #    #end if
    ##end def prev_init

    def isosurface(self):
        from enthought.mayavi import mlab

        npoints,ndim = self.points.shape
        dimensions = array([20,20,20])

        x = zeros(dimensions)
        y = zeros(dimensions)
        z = zeros(dimensions)
        s = zeros(dimensions)

        ipoint = 0
        for i in range(dimensions[0]):
            for j in range(dimensions[1]):
                for k in range(dimensions[2]):
                    r = self.points[ipoint,:]
                    u = dot(self.axinv,r)
                    #u=r
                    x[i,j,k] = u[0]
                    y[i,j,k] = u[1]
                    z[i,j,k] = u[2]
                    s[i,j,k] = self.E[ipoint]
                    ipoint+=1
                #end for
            #end for
        #end for

        mlab.contour3d(x,y,z,s)
        mlab.show()

        return
    #end def isosurface

    def mesh(self):
        return
    #end def mesh

    def etest(self):
        from enthought.mayavi import mlab
        from numpy import pi, sin, cos, exp, arange, array
        ni=10
        dr, dphi, dtheta = 1.0/ni, 2*pi/ni, pi/ni

        rlin = arange(0.0,1.0+dr,dr)
        plin = arange(0.0,2*pi+dphi,dphi)
        tlin = arange(0.0,pi+dtheta,dtheta)
        r,phi,theta = ndgrid(rlin,plin,tlin)

        a=1

        fr = .5*exp(-r/a)*(cos(2*pi*r/a)+1.0)
        fp = (1.0/6.0)*(cos(3.0*phi)+5.0)
        ft = (1.0/6.0)*(cos(10.0*theta)+5.0)

        f = fr*fp*ft

        x = r*sin(theta)*cos(phi)
        y = r*sin(theta)*sin(phi)
        z = r*cos(theta)

        #mayavi
        #mlab.contour3d(x,y,z,f)
        #mlab.contour3d(r,phi,theta,f)
        i=7
        #mlab.mesh(x[i],y[i],z[i],scalars=f[i])
        mlab.mesh(f[i]*x[i],f[i]*y[i],f[i]*z[i],scalars=f[i])
        mlab.show()

        return
    #end def etest

    def mtest(self):
        from enthought.mayavi import mlab
        # Create the data.
        from numpy import pi, sin, cos, mgrid, arange, array
        ni = 100.0
        dtheta, dphi = pi/ni, pi/ni

        #[theta,phi] = mgrid[0:pi+dtheta:dtheta,0:2*pi+dphi:dphi]
        #tlin = arange(0,pi+dtheta,dtheta)
        #plin = arange(0,2*pi+dphi,dphi)
        tlin = pi*array([0,.12,.2,.31,.43,.56,.63,.75,.87,.92,1])
        plin = 2*pi*array([0,.11,.22,.34,.42,.58,.66,.74,.85,.97,1])
        theta,phi = ndgrid(tlin,plin)

        fp = (1.0/6.0)*(cos(3.0*phi)+5.0)
        ft = (1.0/6.0)*(cos(10.0*theta)+5.0)

        r = fp*ft

        x = r*sin(theta)*cos(phi)
        y = r*sin(theta)*sin(phi)
        z = r*cos(theta)

        # View it.
        s = mlab.mesh(x, y, z, scalars=r)
        mlab.show()
        return
    #end def mtest

    def test(self):
        from enthought.mayavi import mlab
        from numpy import array,dot,arange,sin,ogrid,mgrid,zeros

        n=10
        n2=2*n
        # complex step counts give n2 grid points along each axis
        x, y, z = ogrid[-n:n:n2*1j, -n:n:n2*1j, -n:n:n2*1j]
        #x, y, z = mgrid[-10:11:1, -10:11:1, -10:11:1]

        s = sin(x*y*z)/(x*y*z)

        #xl = [-5.0,-4.2,-3.5,-2.1,-1.7,-0.4,0.7,1.8,2.6,3.7,4.3,5.0]
        #yl = [-5.0,-4.3,-3.6,-2.2,-1.8,-0.3,0.8,1.7,2.7,3.6,4.4,5.0]
        #zl = [-5.0,-4.4,-3.7,-2.3,-1.9,-0.4,0.9,1.6,2.8,3.5,4.5,5.0]
        dx = 2.0*n/(2.0*n-1.0)
        xl = arange(-n,n+dx,dx)
        yl = xl
        zl = xl

        x,y,z = ndgrid(xl,yl,zl)

        s2 = sin(x*y*z)/(x*y*z)

        #shear the grid
        nx,ny,nz = x.shape
        A = array([[1,1,-1],[1,-1,1],[-1,1,1]])
        #A = array([[3,2,1],[0,2,1],[0,0,1]])
        #A = array([[4,7,2],[8,4,3],[2,5,3]])
        #A = 1.0*array([[1,2,3],[4,5,6],[7,8,9]]).transpose()
        r = zeros((3,))
        np=0
        for i in range(nx):
            for j in range(ny):
                for k in range(nz):
                    r[0] = x[i,j,k]
                    r[1] = y[i,j,k]
                    r[2] = z[i,j,k]

                    #print np,r[0],r[1],r[2]
                    np+=1

                    r = dot(A,r)
                    x[i,j,k] = r[0]
                    y[i,j,k] = r[1]
                    z[i,j,k] = r[2]
                #end for
            #end for
        #end for
        s2 = sin(x*y*z)/(x*y*z)

        mlab.contour3d(x,y,z,s2)
        mlab.show()

        out = QAobject()
        out.x=x
        out.y=y
        out.z=z
        out.s=s2
        out.A=A

        return out
    #end def test

    def test_structured(self):
        import numpy as np
        from numpy import cos, sin, pi
        from enthought.tvtk.api import tvtk
        from enthought.mayavi import mlab

        def generate_annulus(r=None, theta=None, z=None):
            """ Generate points for a structured grid for a cylindrical annular
                volume.  This method is useful for generating an unstructured
                cylindrical mesh for VTK (and perhaps other tools).

                Parameters
                ----------
                r : array : The radial values of the grid points.
                    It defaults to linspace(1.0, 2.0, 11).
                theta : array : The angular values of the x axis for the grid
                    points.  It defaults to linspace(0, 2*pi, 11).
                z : array : The values along the z axis of the grid points.
                    It defaults to linspace(0.0, 1.0, 11).

                Return
                ------
                points : array
                    Nx3 array of points that make up the volume of the annulus.
                    They are organized in planes starting with the first value
                    of z and with the inside "ring" of the plane as the first
                    set of points.  The default point array will be 1331x3.
            """
            # Default values for the annular grid.
            if r is None: r = np.linspace(1.0, 2.0, 11)
            if theta is None: theta = np.linspace(0, 2*pi, 11)
            if z is None: z = np.linspace(0.0, 1.0, 11)

            # Find the x values and y values for each plane.
            x_plane = (cos(theta)*r[:,None]).ravel()
            y_plane = (sin(theta)*r[:,None]).ravel()

            # Allocate an array for all the points.  We'll have len(x_plane)
            # points on each plane, and we have a plane for each z value, so
            # we need len(x_plane)*len(z) points.
            points = np.empty([len(x_plane)*len(z),3])

            # Loop through the points for each plane and fill them with the
            # correct x,y,z values.
            start = 0
            for z_plane in z:
                end = start + len(x_plane)
                # slice out a plane of the output points and fill it
                # with the x,y, and z values for this plane.  The x,y
                # values are the same for every plane.  The z value
                # is set to the current z.
                plane_points = points[start:end]
                plane_points[:,0] = x_plane
                plane_points[:,1] = y_plane
                plane_points[:,2] = z_plane
                start = end
            #end for

            return points
        #end def generate_annulus

        # Make the data.
        dims = (51, 25, 25)
        # Note here that the 'x' axis corresponds to 'theta'
        theta = np.linspace(0, 2*np.pi, dims[0])
        # 'y' corresponds to varying 'r'
        r = np.linspace(1, 10, dims[1])
        z = np.linspace(0, 5, dims[2])
        pts = generate_annulus(r, theta, z)
        # Uncomment the following if you want to add some noise to the data.
        #pts += np.random.randn(dims[0]*dims[1]*dims[2], 3)*0.04
        sgrid = tvtk.StructuredGrid(dimensions=dims)
        sgrid.points = pts
        s = np.sqrt(pts[:,0]**2 + pts[:,1]**2 + pts[:,2]**2)
        sgrid.point_data.scalars = np.ravel(s.copy())
        sgrid.point_data.scalars.name = 'scalars'

        contour = mlab.pipeline.contour(sgrid)
        mlab.pipeline.surface(contour)

        return
    #end def test_structured
#end class EnergyDensityAnalyzer

class TracesFileHDF(QAobject):
    def __init__(self,filepath=None,blocks=None):
        self.info = obj(
            filepath            = filepath,
            loaded              = False,
            accumulated         = False,
            particle_sums_valid = None,
            blocks              = blocks
            )
    #end def __init__

    def loaded(self):
        return self.info.loaded
    #end def loaded

    def accumulated_scalars(self):
        return self.info.accumulated
    #end def accumulated_scalars

    def checked_particle_sums(self):
        return self.info.particle_sums_valid is not None
    #end def checked_particle_sums

    def formed_diagnostic_data(self):
        return self.accumulated_scalars() and self.checked_particle_sums()
    #end def formed_diagnostic_data

    def load(self,filepath=None,force=False):
        if not self.loaded() or force:
            if filepath is None:
                if self.info.filepath is None:
                    self.error('cannot load traces data, filepath has not been defined')
                else:
                    filepath = self.info.filepath
                #end if
            #end if
            hr = HDFreader(filepath)
            if not hr._success:
                self.warn('hdf file seems to be corrupted, skipping contents:\n  '+filepath)
            #end if
            hdf = hr.obj
            hdf._remove_hidden()
            for name,buffer in hdf.items():
                self.init_trace(name,buffer)
            #end for
            self.info.loaded = True
        #end if
    #end def load

    def unload(self):
        if self.loaded():
            if 'int_traces' in self:
                del self.int_traces
            #end if
            if 'real_traces' in self:
                del self.real_traces
            #end if
            self.info.loaded = False
        #end if
    #end def unload

    def init_trace(self,name,fbuffer):
        trace = obj()
        if 'traces' in fbuffer:
            ftrace = fbuffer.traces
            nrows = len(ftrace)
            for dname,fdomain in fbuffer.layout.items():
                domain = obj()
                for qname,fquantity in fdomain.items():
                    q = obj()
                    for vname,value in fquantity.items():
                        q[vname] = value[0]
                    #end for
                    quantity = ftrace[:,q.row_start:q.row_end]
                    if q.unit_size==1:
                        shape = [nrows]+list(fquantity.shape[0:q.dimension])
                    else:
                        shape = [nrows]+list(fquantity.shape[0:q.dimension])+[q.unit_size]
                    #end if
                    quantity.shape = tuple(shape)
                    #if len(fquantity.shape)==q.dimension:
                    #    quantity.shape = tuple([nrows]+list(fquantity.shape))
                    ##end if
                    domain[qname] = quantity
                #end for
                trace[dname] = domain
            #end for
        #end if
        self[name.replace('data','traces')] = trace
    #end def init_trace

    def check_particle_sums(self,tol=1e-8,force=False):
        if not self.checked_particle_sums() or force:
            self.load()
            t = self.real_traces
            scalar_names = set(t.scalars.keys())
            other_names = []
            for dname,domain in t.items():
                if dname!='scalars':
                    other_names.extend(domain.keys())
                #end if
            #end for
            other_names = set(other_names)
            sum_names = scalar_names & other_names
            same = True
            for qname in sum_names:
                q = t.scalars[qname]
                qs = 0*q
                for dname,domain in t.items():
                    if dname!='scalars' and qname in domain:
                        tqs = domain[qname].sum(1)
                        if len(tqs.shape)==1:
                            qs[:,0] += tqs
                        else:
                            qs[:,0] += tqs[:,0]
                        #end if
                    #end if
                #end for
                same = same and (abs(q-qs)<tol).all()
            #end for
            self.info.particle_sums_valid = same
        #end if
        return self.info.particle_sums_valid
    #end def check_particle_sums

    def accumulate_scalars(self,force=False):
        if not self.accumulated_scalars() or force:
            # get block and step information for the qmc method
            blocks = self.info.blocks
            if blocks is None:
                self.scalars_by_step  = None
                self.scalars_by_block = None
                return
            #end if
            # load in traces data if it isn't already
            self.load()
            # real and int traces
            tr = self.real_traces
            ti = self.int_traces
            # names shared by traces and scalar files
            scalar_names = set(tr.scalars.keys())
            # step and weight traces
            st = ti.scalars.step
            wt = tr.scalars.weight
            if len(st)!=len(wt):
                self.error('weight and steps traces have different lengths')
            #end if
            #recompute steps (can vary for vmc w/ samples/samples_per_thread)
            steps = st.max()+1
            steps_per_block = steps//blocks
            # accumulate weights into steps and blocks
            ws = zeros((steps,))
            wb = zeros((blocks,))
            for t in range(len(wt)):
                ws[st[t]] += wt[t]
            #end for
            s = 0
            for b in range(blocks):
                wb[b] = ws[s:s+steps_per_block].sum()
                s+=steps_per_block
            #end for
            # accumulate walker population into steps
            ps = zeros((steps,))
            for t in range(len(wt)):
                ps[st[t]] += 1
            #end for
            # accumulate quantities into steps and blocks
            scalars_by_step  = obj(Weight=ws,NumOfWalkers=ps)
            scalars_by_block = obj(Weight=wb)
            qs = zeros((steps,))
            qb = zeros((blocks,))
            quantities = set(tr.scalars.keys())
            quantities.remove('weight')
            for qname in quantities:
                qt = tr.scalars[qname]
                if len(qt)!=len(wt):
                    self.error('quantity {0} trace is not commensurate with weight and steps traces'.format(qname))
                #end if
                qs[:] = 0
                for t in range(len(wt)):
                    qs[st[t]] += wt[t]*qt[t]
                #end for
                qb[:] = 0
                s=0
                for b in range(blocks):
                    qb[b] = qs[s:s+steps_per_block].sum()
                    s+=steps_per_block
                #end for
                qb = qb/wb
                qs = qs/ws
                scalars_by_step[qname]  = qs.copy()
                scalars_by_block[qname] = qb.copy()
            #end for
            self.scalars_by_step  = scalars_by_step
            self.scalars_by_block = scalars_by_block
            self.info.accumulated = True
        #end if
    #end def accumulate_scalars

    def form_diagnostic_data(self,tol=1e-8):
        if not self.formed_diagnostic_data():
            self.load()
            self.accumulate_scalars()
            self.check_particle_sums(tol=tol)
            self.unload()
        #end if
    #end def form_diagnostic_data
#end class TracesFileHDF

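# Accumulation sketch for the per-step averages formed above: each trace row
# is one walker at one step, so for a quantity q with per-walker weight w the
# step average is the weighted mean over walkers,
#
#   q_step(s) = sum_{t: step(t)=s} w(t)*q(t) / sum_{t: step(t)=s} w(t)
#
# and block averages are the same weighted mean taken over whole blocks of
# steps.  This is what makes the traces directly comparable to scalars.dat
# and dmc.dat in the validation checks below.
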
class TracesAnalyzer(QAanalyzer):
    def __init__(self,path,files,nindent=0):
        QAanalyzer.__init__(self,nindent=nindent)
        self.info.path  = path
        self.info.files = files
        self.method_info = QAanalyzer.method_info
        self.data = obj()
    #end def __init__

    def load_data_local(self):
        if 'blocks' in self.method_info.method_input:
            blocks = self.method_info.method_input.blocks
        else:
            blocks = None
        #end if
        path  = self.info.path
        files = self.info.files
        self.data.clear()
        for file in sorted(files):
            filepath = os.path.join(path,file)
            trace_file = TracesFileHDF(filepath,blocks)
            self.data.append(trace_file)
        #end for
        #if self.run_info.request.traces:
        #    path  = self.info.path
        #    files = self.info.files
        #    if len(files)>1:
        #        self.error('ability to read multiple trace files has not yet been implemented\n  files requested: {0}'.format(files))
        #    #end if
        #    filepath = os.path.join(path,files[0])
        #    self.data = TracesFileHDF(filepath)
        #    ci(ls(),gs())
        ##end if
    #end def load_data_local

    def form_diagnostic_data(self):
        for trace_file in self.data:
            trace_file.form_diagnostic_data()
        #end for
    #end def form_diagnostic_data

    def analyze_local(self):
        None
    #end def analyze_local

    def check_particle_sums(self,tol=1e-8):
        same = True
        for trace_file in self.data:
            same &= trace_file.check_particle_sums(tol=tol)
        #end for
        return same
    #end def check_particle_sums

    def check_scalars(self,scalars=None,scalars_hdf=None,tol=1e-8):
        scalars_valid     = True
        scalars_hdf_valid = True
        if scalars is None:
            scalars_valid = None
        #end if
        if scalars_hdf is None:
            scalars_hdf_valid = None
        #end if
        if len(self.data)>0:
            scalar_names = set(self.data[0].scalars_by_block.keys())
            summed_scalars = obj()
            if scalars is not None:
                qnames = set(scalars.keys()) & scalar_names
                summed_scalars.clear()
                for qname in qnames:
                    summed_scalars[qname] = zeros(scalars[qname].shape)
                #end for
                wtot = zeros(summed_scalars.first().shape)
                for trace_file in self.data:
                    w = trace_file.scalars_by_block.Weight
                    wtot += w
                    for qname in qnames:
                        q = trace_file.scalars_by_block[qname]
                        summed_scalars[qname] += w*q
                    #end for
                #end for
                for qname in qnames:
                    qscalar = scalars[qname]
                    qb = summed_scalars[qname]/wtot
                    scalars_valid &= (abs(qb-qscalar)<tol).all()
                #end for
            #end if
            if scalars_hdf is not None:
                qnames = set(scalars_hdf.keys()) & scalar_names
                summed_scalars.clear()
                for qname in qnames:
                    summed_scalars[qname] = zeros((len(scalars_hdf[qname].value),))
                #end for
                wtot = zeros(summed_scalars.first().shape)
                for trace_file in self.data:
                    w = trace_file.scalars_by_block.Weight
                    wtot += w
                    for qname in qnames:
                        q = trace_file.scalars_by_block[qname]
                        summed_scalars[qname] += w*q
                    #end for
                #end for
                for qname in qnames:
                    qscalar = scalars_hdf[qname].value.ravel()
                    qb = summed_scalars[qname]/wtot
                    scalars_hdf_valid &= (abs(qb-qscalar)<tol).all()
                #end for
            #end if
        #end if
        return scalars_valid,scalars_hdf_valid
    #end def check_scalars

    def check_dmc(self,dmc,tol=1e-8):
        if dmc is None:
            dmc_valid = None
        else:
            dmc_valid = True
            if len(self.data)>0:
                scalar_names = set(self.data[0].scalars_by_step.keys())
                qnames = set(['LocalEnergy','Weight','NumOfWalkers']) & scalar_names
                weighted = set(['LocalEnergy'])
                summed_scalars = obj()
                for qname in qnames:
                    summed_scalars[qname] = zeros(dmc[qname].shape)
                #end for
                wtot = zeros(summed_scalars.first().shape)
                for trace_file in self.data:
                    w = trace_file.scalars_by_step.Weight
                    wtot += w
                    for qname in qnames:
                        q = trace_file.scalars_by_step[qname]
                        if qname in weighted:
                            summed_scalars[qname] += w*q
                        else:
                            summed_scalars[qname] += q
                        #end if
                    #end for
                #end for
                for qname in qnames:
                    qdmc = dmc[qname]
                    if qname in weighted:
                        qb = summed_scalars[qname]/wtot
                    else:
                        qb = summed_scalars[qname]
                    #end if
                    dmc_valid &= (abs(qb-qdmc)<tol).all()
                #end for
            #end if
        #end if
        return dmc_valid
    #end def check_dmc

    def check_scalars_old(self,scalars=None,scalars_hdf=None,tol=1e-8):
        blocks          = None
        steps_per_block = None
        steps           = None
        method_input = self.method_info.method_input
        if 'blocks' in method_input:
            blocks = method_input.blocks
        #end if
        if 'steps' in method_input:
            steps_per_block = method_input.steps
        #end if
        if blocks is not None and steps_per_block is not None:
            steps = blocks*steps_per_block
        #end if
        if steps is None:
            return None,None
        #end if
        # real and int traces
        tr = self.data.real_traces
        ti = self.data.int_traces
        # names shared by traces and scalar files
        scalar_names = set(tr.scalars.keys())
        # step and weight traces
        st = ti.scalars.step
        wt = tr.scalars.weight
        if len(st)!=len(wt):
            self.error('weight and steps traces have different lengths')
        #end if
        #recompute steps (can vary for vmc w/ samples/samples_per_thread)
        steps = st.max()+1
        steps_per_block = steps//blocks
        # accumulate weights into steps and blocks
        ws  = zeros((steps,))
        qs  = zeros((steps,))
        q2s = zeros((steps,))
        wb  = zeros((blocks,))
        qb  = zeros((blocks,))
        q2b = zeros((blocks,))
        for t in range(len(wt)):
            ws[st[t]] += wt[t]
        #end for
        s = 0
        for b in range(blocks):
            wb[b] = ws[s:s+steps_per_block].sum()
            s+=steps_per_block
        #end for
        # check scalar.dat
        if scalars is None:
            scalars_valid = None
        else:
            dat_names = set(scalars.keys()) & scalar_names
            same = True
            for qname in dat_names:
                qt = tr.scalars[qname]
                if len(qt)!=len(wt):
                    self.error('quantity {0} trace is not commensurate with weight and steps traces'.format(qname))
                #end if
                qs[:] = 0
                for t in range(len(qt)):
                    qs[st[t]] += wt[t]*qt[t]
                #end for
                qb[:] = 0
                s=0
                for b in range(blocks):
                    qb[b] = qs[s:s+steps_per_block].sum()
                    s+=steps_per_block
                #end for
                qb = qb/wb
                qs = qs/ws
                qscalar = scalars[qname]
                qsame = (abs(qb-qscalar)<tol).all()
                #if not qsame and qname=='LocalEnergy':
                #    print '  scalar.dat LocalEnergy'
                #    print qscalar
                #    print qb
                ##end if
                same = same and qsame
            #end for
            scalars_valid = same
        #end if
        # check scalars from stat.h5
        if scalars_hdf is None:
            scalars_hdf_valid = None
        else:
            hdf_names = set(scalars_hdf.keys()) & scalar_names
            same = True
            for qname in hdf_names:
                qt = tr.scalars[qname]
                if len(qt)!=len(wt):
                    self.error('quantity {0} trace is not commensurate with weight and steps traces'.format(qname))
                #end if
                qs[:]  = 0
                q2s[:] = 0
                for t in range(len(qt)):
                    s = st[t]
                    w = wt[t]
                    q = qt[t]
                    qs[s]  += w*q
                    q2s[s] += w*q*q
                #end for
                qb[:] = 0
                s=0
                for b in range(blocks):
                    qb[b]  = qs[s:s+steps_per_block].sum()
                    q2b[b] = q2s[s:s+steps_per_block].sum()
                    s+=steps_per_block
                #end for
                qb  = qb/wb
                q2b = q2b/wb
                qs  = qs/ws
                q2s = q2s/ws
                qhdf = scalars_hdf[qname]
                qscalar  = qhdf.value.ravel()
                q2scalar = qhdf.value_squared.ravel()
                qsame  = (abs(qb -qscalar )<tol).all()
                q2same = (abs(q2b-q2scalar)<tol).all()
                #if not qsame and qname=='LocalEnergy':
                #    print '  stat.h5 LocalEnergy'
                #    print qscalar
                #    print qb
                ##end if
                same = same and qsame and q2same
            #end for
            scalars_hdf_valid = same
        #end if
        return scalars_valid,scalars_hdf_valid
    #end def check_scalars_old

    def check_dmc_old(self,dmc,tol=1e-8):
        if dmc is None:
            dmc_valid = None
        else:
            #dmc data
            ene = dmc.LocalEnergy
            wgt = dmc.Weight
            pop = dmc.NumOfWalkers
            # real and int traces
            tr = self.data.real_traces
            ti = self.data.int_traces
            # names shared by traces and scalar files
            scalar_names = set(tr.scalars.keys())
            # step and weight traces
            st = ti.scalars.step
            wt = tr.scalars.weight
            et = tr.scalars.LocalEnergy
            if len(st)!=len(wt):
                self.error('weight and steps traces have different lengths')
            #end if
            #recompute steps (can vary for vmc w/ samples/samples_per_thread)
            steps = st.max()+1
            # accumulate weights into steps
            ws = zeros((steps,))
            es = zeros((steps,))
            ps = zeros((steps,))
            for t in range(len(wt)):
                ws[st[t]] += wt[t]
            #end for
            for t in range(len(wt)):
                es[st[t]] += wt[t]*et[t]
            #end for
            for t in range(len(wt)):
                ps[st[t]] += 1
            #end for
            es/=ws
            psame = (abs(ps-pop)<tol).all()
            wsame = (abs(ws-wgt)<tol).all()
            esame = (abs(es-ene)<tol).all()
            dmc_valid = psame and wsame and esame
        #end if
        return dmc_valid
    #end def check_dmc_old

    #methods that do not apply
    def init_sub_analyzers(self):
        None
    def zero_data(self):
        None
    def minsize_data(self,other):
        None
    def accumulate_data(self,other):
        None
    def normalize_data(self,normalization):
        None
#end class TracesAnalyzer

class DMSettings(QAobject):
    def __init__(self,ds):
        self.jackknife = True
        self.diagonal  = False
        self.save_data = True
        self.occ_tol   = 1e-3
        self.coup_tol  = 1e-4
        self.stat_tol  = 2.0
        if ds is not None:
            for name,value in ds.items():
                if not name in self:
                    self.error('{0} is an invalid setting for DensityMatricesAnalyzer\n  valid options are: {1}'.format(name,sorted(self.keys())))
                else:
                    self[name] = value
                #end if
            #end for
        #end if
    #end def __init__
#end class DMSettings

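# Settings sketch (hypothetical values; 'dm_settings' is taken from the
# analyzer request, so any subset of the defaults above may be supplied):
#
#   dm_settings = dict(
#       jackknife = True,   # jackknife error bars on eigenvalues
#       diagonal  = False,  # analyze only the matrix diagonal
#       occ_tol   = 1e-3,   # drop states with negligible occupation
#       )
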
class DensityMatricesAnalyzer(HDFAnalyzer):

    allowed_settings = ['save_data','jackknife','diagonal','occ_tol','coup_tol','stat_tol']

    def __init__(self,name,nindent=0):
        HDFAnalyzer.__init__(self)
        self.info.name = name
    #end def __init__

    def load_data_local(self,data=None):
        if data is None:
            self.error('attempted load without data')
        #end if
        i = complex(0,1)
        loc_data = QAdata()
        name = self.info.name
        self.info.complex = False
        if name in data:
            matrices = data[name]
            del data[name]
            matrices._remove_hidden()
            for mname,matrix in matrices.items():
                mdata = QAdata()
                loc_data[mname] = mdata
                for species,d in matrix.items():
                    v = d.value
                    if 'value_squared' in d:
                        v2 = d.value_squared
                    #end if
                    if len(v.shape)==4 and v.shape[3]==2:
                        d.value = v[:,:,:,0] + i*v[:,:,:,1]
                        if 'value_squared' in d:
                            d.value_squared = v2[:,:,:,0] + i*v2[:,:,:,1]
                        #end if
                        self.info.complex = True
                    #end if
                    mdata[species] = d
                #end for
            #end for
        #end if
        self.data = loc_data
        self.info.should_remove = False
    #end def load_data_local

    def analyze_local(self):
        # 1) exclude states that do not contribute to the number trace
        # 2) exclude elements that are not statistically significant (1 sigma?)
        # 3) use remaining states to form filtered number and energy matrices
        # 4) perform jackknife sampling to get eigenvalue error bars
        # 5) consider using cross-correlations w/ excluded elements to reduce variance

        ds = DMSettings(self.run_info.request.dm_settings)
        diagonal  = ds.diagonal
        jackknife = ds.jackknife and not diagonal
        save_data = ds.save_data
        occ_tol   = ds.occ_tol
        coup_tol  = ds.coup_tol
        stat_tol  = ds.stat_tol

        nbe = QAanalyzer.method_info.nblocks_exclude
        self.info.nblocks_exclude = nbe
        has_nmat = 'number_matrix' in self.data
        has_emat = 'energy_matrix' in self.data
        species = list(self.data.number_matrix.keys())
        species_sizes = obj()
        ps = self.run_info.input.get('particleset')
        for s in species:
            species_sizes[s] = ps.e.groups[s].size
        #end for
        mnames = []
        if has_nmat:
            mnames.append('number_matrix')
            if has_emat:
                mnames.append('energy_matrix')
            #end if
        #end if

        for species_name in species:
            for matrix_name in mnames:
                if not matrix_name in self:
                    self[matrix_name] = obj()
                #end if
                mres  = self[matrix_name]
                msres = obj()
                mres[species_name] = msres

                species_data = self.data[matrix_name][species_name]

                md_all = species_data.value
                mdata  = md_all[nbe:,...]

                tdata = zeros((len(md_all),))
                b = 0
                for mat in md_all:
                    tdata[b] = trace(mat).real  # trace sums to N-elec (real)
                    b+=1
                #end for
                t,tvar,terr,tkap = simstats(tdata[nbe:])
                msres.trace       = t
                msres.trace_error = terr

                if save_data:
                    msres.trace_data = tdata
                    msres.data       = md_all
                #end if

                if diagonal:
                    ddata = empty(mdata.shape[0:2],dtype=mdata.dtype)
                    b = 0
                    for mat in mdata:
                        ddata[b] = diag(mat)
                        b+=1
                    #end for
                    d,dvar,derr,dkap = simstats(ddata.transpose())
                    msres.set(
                        eigval  = d,
                        eigvec  = identity(len(d)),
                        eigmean = d,
                        eigerr  = derr
                        )
                else:
                    m,mvar,merr,mkap = simstats(mdata.transpose((1,2,0)))

                    mfull  = m
                    mefull = merr

                    if matrix_name=='number_matrix':
                        # remove states that do not have significant occupation
                        nspec = species_sizes[species_name]
                        occ = diag(m)/t*nspec
                        nstates = len(occ)
                        abs_occ = abs(occ)
                        abs_occ.sort()
                        nsum = 0
                        i = -1
                        min_occ = 0
                        for o in abs_occ:
                            if nsum+o<occ_tol:
                                nsum+=o
                                i+=1
                            #end if
                        #end for
                        if i!=-1:
                            min_occ = abs_occ[i]+1e-12
                        #end if
                        sig_states = arange(nstates)[abs(occ)>min_occ]
                        nsig = len(sig_states)
                        if nsig<nspec:
                            self.warn('number matrix has fewer occupied states than particles')
                            sig_states = arange(nstates)
                        #end if
                        sig_occ = empty((nstates,nstates),dtype=bool)
                        sig_occ[:,:] = False
                        for s in sig_states:
                            sig_occ[s,sig_states] = True
                        #end for
                    #end if
                    # remove states with insignificant occupation
                    mos = m
                    m = m[sig_occ]
                    m.shape = nsig,nsig
                    merr = merr[sig_occ]
                    merr.shape = nsig,nsig
                    # remove off-diagonal elements with insignificant coupling
                    insig_coup = ones(m.shape,dtype=bool)
                    for i in range(nsig):
                        for j in range(nsig):
                            mdiag = min((abs(m[i,i]),abs(m[j,j])))
                            insig_coup[i,j] = abs(m[i,j])/mdiag < coup_tol
                        #end for
                    #end for
                    # remove elements with insignificant statistical deviation from zero
                    insig_stat = abs(m)/merr < stat_tol
                    # remove insignificant elements
                    insig_coup_stat = insig_coup | insig_stat
                    for i in range(nsig):
                        insig_coup_stat[i,i] = False
                    #end for
                    moi = m.copy()
                    m[insig_coup_stat] = 0.0

                    # obtain standard eigenvalue estimates
                    eigval,eigvec = eig(m)

                    # save common results
                    msres.set(
                        matrix            = m,
                        matrix_error      = merr,
                        sig_states        = sig_states,
                        sig_occ           = sig_occ,
                        insig_coup        = insig_coup,
                        insig_stat        = insig_stat,
                        insig_coup_stat   = insig_coup_stat,
                        eigval            = eigval,
                        eigvec            = eigvec,
                        matrix_full       = mfull,
                        matrix_error_full = mefull,
                        )

                    if jackknife:
                        # obtain jackknife eigenvalue estimates
                        nblocks  = len(mdata)
                        mjdata   = zeros((nblocks,nsig,nsig),dtype=mdata.dtype)
                        eigsum   = zeros((nsig,),dtype=mdata.dtype)
                        eigsum2r = zeros((nsig,),dtype=mdata.dtype)
                        eigsum2i = zeros((nsig,),dtype=mdata.dtype)
                        i  = complex(0,1)
                        nb = float(nblocks)
                        for b in range(nblocks):
                            mb = mdata[b,...][sig_occ]
                            mb.shape = nsig,nsig
                            mb[insig_coup_stat] = 0.0
                            mj = (nb*m-mb)/(nb-1)
                            mjdata[b,...] = mj
                            d,v = eig(mj)
                            eigsum   += d
                            eigsum2r += real(d)**2
                            eigsum2i += imag(d)**2
                        #end for
                        eigmean = eigsum/nb
                        esr = real(eigsum)
                        esi = imag(eigsum)
                        eigvar = (nb-1)/nb*(eigsum2r+i*eigsum2i-(esr**2+i*esi**2)/nb)
                        eigerr = sqrt(real(eigvar))+i*sqrt(imag(eigvar))
                        msres.set(
                            eigmean = eigmean,
                            eigerr  = eigerr
                            )

                        # perform generalized eigenvalue analysis for energy matrix
                        if matrix_name=='number_matrix':
                            nmjdata = mjdata
                            nm = m
                        elif matrix_name=='energy_matrix':
                            # obtain general eigenvalue estimates
                            em = m
                            geigval,geigvec = eig(em,nm)
                            # get occupations of eigenvectors
                            eigocc  = zeros((nsig,),dtype=mdata.dtype)
                            geigocc = zeros((nsig,),dtype=mdata.dtype)
                            for k in range(nsig):
                                v = eigvec[:,k]
                                eigocc[k] = dot(v.conj(),dot(nm,v))
                                v = geigvec[:,k]
                                geigocc[k] = dot(v.conj(),dot(nm,v))
                            #end for
                            # obtain jackknife estimates of generalized eigenvalues
                            emjdata = mjdata
                            eigsum[:]   = 0.0
                            eigsum2r[:] = 0.0
                            eigsum2i[:] = 0.0
                            for b in range(nblocks):
                                d,v = eig(emjdata[b,...],nmjdata[b,...])
                                eigsum   += d
                                eigsum2r += real(d)**2
                                eigsum2i += imag(d)**2
                            #end for
                            geigmean = eigsum/nb
                            esr = real(eigsum)
                            esi = imag(eigsum)
                            eigvar = (nb-1)/nb*(eigsum2r+i*eigsum2i-(esr**2+i*esi**2)/nb)
                            geigerr = sqrt(real(eigvar))+i*sqrt(imag(eigvar))
                            # save the results
                            msres.set(
                                eigocc   = eigocc,
                                geigocc  = geigocc,
                                geigval  = geigval,
                                geigvec  = geigvec,
                                geigmean = geigmean,
                                geigerr  = geigerr
                                )
                        #end if
                    #end if
                #end if
            #end for
        #end for
        del self.data
        #self.write_files()
    #end def analyze_local

    def analyze_local_orig(self):
        nbe = QAanalyzer.method_info.nblocks_exclude
        self.info.nblocks_exclude = nbe
        for matrix_name,matrix_data in self.data.items():
            mres = obj()
            self[matrix_name] = mres
            for species_name,species_data in matrix_data.items():
                md_all = species_data.value
                mdata  = md_all[nbe:,...]
                m,mvar,merr,mkap = simstats(mdata.transpose((1,2,0)))

                tdata = zeros((len(md_all),))
                b = 0
                for mat in md_all:
                    tdata[b] = trace(mat)
                    b+=1
                #end for
                t,tvar,terr,tkap = simstats(tdata[nbe:])

                try:
                    val,vec = eig(m)
                except LinAlgError:
                    self.warn(matrix_name+' diagonalization failed!')
                    val,vec = None,None
                #end try

                mres[species_name] = obj(
                    matrix       = m,
                    matrix_error = merr,
                    eigenvalues  = val,
                    eigenvectors = vec,
                    trace        = t,
                    trace_error  = terr,
                    trace_data   = tdata,
                    data         = md_all
                    )
            #end for
        #end for
        if self.has_energy_matrix():
            nmat = self.number_matrix
            emat = self.energy_matrix
            for s,es in emat.items():
                ns = nmat[s]
                nm = ns.matrix
                em = es.matrix
                try:
                    val,vec = eig(em,nm)
                except LinAlgError:
                    self.warn('energy matrix generalized diagonalization failed!')
                    val,vec = None,None
                #end try
                size = len(vec)
                occ = zeros((size,),dtype=nm.dtype)
                for i in range(size):
                    v = vec[:,i]
                    occ[i] = dot(v.conj(),dot(nm,v))
                #end for
                es.set(
                    energies       = val,
                    occupations    = occ,
                    energy_vectors = vec
                    )
            #end for
        #end if
        del self.data
        #self.write_files()
        ci(ls(),gs())
    #end def analyze_local_orig

    def has_energy_matrix(self):
        return 'energy_matrix' in self
    #end def has_energy_matrix

    def write_files(self,path='./'):
        prefix = self.method_info.file_prefix
        nm = self.number_matrix
        for gname,g in nm.items():
            filename = '{0}.dm1b_{1}.dat'.format(prefix,gname)
            filepath = os.path.join(path,filename)
            mean  = g.matrix.ravel()
            error = g.matrix_error.ravel()
            if not self.info.complex:
                savetxt(filepath,concatenate((mean,error)))
            else:
                savetxt(filepath,concatenate((real(mean ),imag(mean ),
                                              real(error),imag(error))))
            #end if
        #end for
    #end def write_files
#end class DensityMatricesAnalyzer

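# Jackknife refresher for the eigenvalue error bars above (standard
# leave-one-out estimator over Nb post-equilibration blocks):
#
#   m_J(b)   = (Nb*m - m_b)/(Nb - 1)     resampled matrix with block b left out
#   lam_J(b) = eig(m_J(b))
#   var(lam) = (Nb-1)/Nb * sum_b (lam_J(b) - <lam_J>)^2
#
# Real and imaginary parts are accumulated separately since the eigenvalues
# of the filtered matrices are complex in general.
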
class DensityAnalyzerBase(HDFAnalyzer):
    def __init__(self,name,nindent=0):
        HDFAnalyzer.__init__(self)
        self.info.set(
            name        = name,
            structure   = self.run_info.system.structure,
            file_prefix = self.run_info.file_prefix,
            source_path = self.run_info.source_path,
            series      = self.method_info.series
            )
        try:
            self.info.xml = self.run_info.input.get(self.info.name)
        except Exception:
            self.info.xml = None
        #end try
    #end def __init__

    def write_single_density(self,name,density,density_err,format='xsf'):
        if format!='xsf':
            self.error('sorry, the density can only be written in xsf format for now\n  you requested: {0}'.format(format))
        #end if

        s = self.info.structure.copy()
        p = s.pos.ravel()
        if p.min()>0 and p.max()<1.0:
            s.pos_to_cartesian()
        #end if
        s.change_units('A')
        cell = s.axes

        f = XsfFile()
        f.incorporate_structure(s)

        prefix = '{0}.s{1}.{2}'.format(self.info.file_prefix,str(self.info.series).zfill(3),name)

        c = 1
        g = 1
        t = 1

        print('writing to',self.info.source_path,prefix)

        # mean
        f.add_density(cell,density,centered=c,add_ghost=g)
        f.write(os.path.join(self.info.source_path,prefix+'.xsf'))

        # mean + errorbar
        f.add_density(cell,density+density_err,centered=c,add_ghost=g)
        f.write(os.path.join(self.info.source_path,prefix+'+err.xsf'))

        # mean - errorbar
        f.add_density(cell,density-density_err,centered=c,add_ghost=g)
        f.write(os.path.join(self.info.source_path,prefix+'-err.xsf'))
    #end def write_single_density

    def write_density(self,format='xsf'):
        self.not_implemented()
    #end def write_density
#end class DensityAnalyzerBase

class SpinDensityAnalyzer(DensityAnalyzerBase):
    def load_data_local(self,data=None):
        if data is None:
            self.error('attempted load without data')
        #end if
        name = self.info.name
        if name in data:
            hdata = data[name]
            hdata._remove_hidden()
            self.data = QAHDFdata()
            self.data.transfer_from(hdata)
            del data[name]
        else:
            self.info.should_remove = True
        #end if

        if 'grid' in self.info.xml:
            g = self.info.xml.grid
        else:
            # infer the grid shape from the cell axes and the requested dr
            dr = self.info.xml.dr
            axes = self.info.structure.axes
            g = array((ceil(sqrt(axes[0].dot(axes[0]))/dr[0]),
                       ceil(sqrt(axes[1].dot(axes[1]))/dr[1]),
                       ceil(sqrt(axes[2].dot(axes[2]))/dr[2])),dtype=int)
        #end if

        for d in self.data:
            b = len(d.value)
            d.value.shape = (b,g[0],g[1],g[2])
            if 'value_squared' in d:
                d.value_squared.shape = (b,g[0],g[1],g[2])
            #end if
        #end for
    #end def load_data_local

    def analyze_local(self):
        nbe = QAanalyzer.method_info.nblocks_exclude
        for group,data in self.data.items():
            gdata = data.value[nbe:,...]
            g = obj()
            #g.mean,g.variance,g.error,g.kappa = simstats(gdata,dim=0)
            g.mean,g.error = simplestats(gdata,dim=0)
            self[group] = g
        #end for
        self.info.nblocks_exclude = nbe
        #self.write_files()
    #end def analyze_local

    def write_files(self,path='./'):
        prefix = self.method_info.file_prefix
        for gname in self.data.keys():
            filename = '{0}.spindensity_{1}.dat'.format(prefix,gname)
            filepath = os.path.join(path,filename)
            mean  = self[gname].mean.ravel()
            error = self[gname].error.ravel()
            savetxt(filepath,concatenate((mean,error)))
        #end for
    #end def write_files

    def write_density(self,format='xsf'):
        nbe = self.info.nblocks_exclude
        umean = self.u.mean
        uerr  = self.u.error
        dmean = self.d.mean
        derr  = self.d.error

        upd_data = self.data.u.value + self.data.d.value
        umd_data = self.data.u.value - self.data.d.value

        upd_mean,upd_err = simplestats(upd_data[nbe:,...],dim=0)
        umd_mean,umd_err = simplestats(umd_data[nbe:,...],dim=0)

        self.write_single_density('spindensity_u'  ,umean   ,uerr   ,format)
        self.write_single_density('spindensity_d'  ,dmean   ,derr   ,format)
        self.write_single_density('spindensity_u+d',upd_mean,upd_err,format)
        self.write_single_density('spindensity_u-d',umd_mean,umd_err,format)
    #end def write_density
#end class SpinDensityAnalyzer

class StructureFactorAnalyzer(HDFAnalyzer):
    def __init__(self,name,nindent=0):
        HDFAnalyzer.__init__(self)
        self.info.name = name
    #end def __init__

    def load_data_local(self,data=None):
        if data is None:
            self.error('attempted load without data')
        #end if
        name = self.info.name
        if name in data:
            hdata = data[name]
            hdata._remove_hidden()
            self.data = QAHDFdata()
            self.data.transfer_from(hdata)
            del data[name]
        else:
            self.info.should_remove = True
        #end if
    #end def load_data_local

    def analyze_local(self):
        nbe = QAanalyzer.method_info.nblocks_exclude
        for group,data in self.data.items():
            gdata = data.value[nbe:,...]
            g = obj()
            #g.mean,g.variance,g.error,g.kappa = simstats(gdata,dim=0)
            g.mean,g.error = simplestats(gdata,dim=0)
            self[group] = g
        #end for
        self.info.nblocks_exclude = nbe
        #self.write_files()
    #end def analyze_local

    def write_files(self,path='./'):
        print('  sf write files')
        prefix = self.method_info.file_prefix
        for gname in self.data.keys():
            filename = '{0}.structurefactor_{1}.dat'.format(prefix,gname)
            filepath = os.path.join(path,filename)
            mean  = self[gname].mean.ravel()
            error = self[gname].error.ravel()
            savetxt(filepath,concatenate((mean,error)))
        #end for
    #end def write_files
#end class StructureFactorAnalyzer

class DensityAnalyzer(DensityAnalyzerBase):
|
|
|
|
def load_data_local(self,data=None):
|
|
if data==None:
|
|
self.error('attempted load without data')
|
|
#end if
|
|
name = self.info.name
|
|
if name in data:
|
|
hdata = data[name]
|
|
hdata._remove_hidden()
|
|
self.data = QAHDFdata()
|
|
self.data.transfer_from(hdata)
|
|
del data[name]
|
|
else:
|
|
self.info.should_remove = True
|
|
#end if
|
|
#end def load_data_local
|
|
|
|
|
|
def analyze_local(self):
|
|
nbe = QAanalyzer.method_info.nblocks_exclude
|
|
self.mean,self.error = simplestats(self.data.value[nbe:,...],dim=0)
|
|
self.info.nblocks_exclude = nbe
|
|
#end def analyze_local
|
|
|
|
|
|
def write_density(self,format='xsf'):
|
|
self.write_single_density('density',self.mean,self.error,format)
|
|
#end def write_density
|
|
#end class DensityAnalyzer
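

# Illustrative sketch (hypothetical array): analyze_local above discards the
# first nblocks_exclude equilibration blocks before averaging over blocks.
def _example_exclude_equilibration_blocks():
    value = arange(12.0).reshape(6,2)   # 6 blocks of a 2-point density
    nbe = 2                             # hypothetical equilibration length
    return simplestats(value[nbe:,...],dim=0)   # (mean,error) per grid point
#end def _example_exclude_equilibration_blocks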


# spacegrid code

import re
import copy
from numpy import array,floor,sqrt,zeros,prod,dot,ones,empty,min,max
from numpy import pi,sin,cos,arccos as acos,arctan2 as atan2
from numpy.linalg import inv,det
from numerics import simplestats,ndgrid,ogrid,arange,simstats
from hdfreader import HDFgroup

#simple constants
o2pi = 1./(2.*pi)

#simple functions
def is_integer(i):
    # nearest-integer comparison so values just below an integer also qualify
    return abs(round(i)-i)<1e-6
#end def is_integer
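
# Quick sketch of the tolerance above: values within 1e-6 of an integer count
# as integral, e.g. is_integer(2.0000004) is True while is_integer(2.01) is False.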


class SpaceGridInitializer(QAobject):
    def __init__(self):
        self.coord = None # string
        return
    #end def __init__

    def check_complete(self,exit_on_fail=True):
        succeeded = True
        for k,v in self.items():
            if v is None:
                succeeded=False
                if exit_on_fail:
                    self.error(' SpaceGridInitializer.'+k+' must be provided',exit=False)
                #end if
            #end if
        #end for
        if not succeeded and exit_on_fail:
            self.error(' SpaceGridInitializer is incomplete')
        #end if
        return succeeded
    #end def check_complete
#end class SpaceGridInitializer


class SpaceGridBase(QAobject):
    cnames=['cartesian','cylindrical','spherical','voronoi']
    coord_s2n = dict()
    coord_n2s = dict()
    for i,name in enumerate(cnames):
        coord_s2n[name]=i
        coord_n2s[i]=name
    #end for

    cartesian   = coord_s2n['cartesian']
    cylindrical = coord_s2n['cylindrical']
    spherical   = coord_s2n['spherical']
    voronoi     = coord_s2n['voronoi']

    xlabel = 0
    ylabel = 1
    zlabel = 2
    rlabel = 3
    plabel = 4
    tlabel = 5
    axlabel_s2n = {'x':xlabel,'y':ylabel,'z':zlabel,'r':rlabel,'phi':plabel,'theta':tlabel}
    axlabel_n2s = {xlabel:'x',ylabel:'y',zlabel:'z',rlabel:'r',plabel:'phi',tlabel:'theta'}

    axindex = {'x':0,'y':1,'z':2,'r':0,'phi':1,'theta':2}

    quantities=['D','T','V','E','P']

    def __init__(self,initobj,options):
        if options is None:
            options = QAobject()
            options.wasNone         = True
            options.points          = None
            options.exit_on_fail    = True
            options.nblocks_exclude = 0
        else:
            if 'points' not in options:
                options.points = None
            if 'exit_on_fail' not in options:
                options.exit_on_fail = True
            if 'nblocks_exclude' not in options:
                options.nblocks_exclude = 0
            #end if
        #end if

        self.points          = options.points
        self.init_exit_fail  = options.exit_on_fail
        self.nblocks_exclude = options.nblocks_exclude
        self.keep_data       = True
        delvars = ['init_exit_fail','keep_data']

        self.coord              = None # string
        self.coordinate         = None
        self.ndomains           = None
        self.domain_volumes     = None
        self.domain_centers     = None
        self.nvalues_per_domain = -1
        self.nblocks            = -1
        self.D = QAobject() #Number Density
        self.T = QAobject() #Kinetic Energy Density
        self.V = QAobject() #Potential Energy Density
        self.E = QAobject() #Energy Density, T+V
        self.P = QAobject() #Local Pressure, (Volume)*P=(2*T+V)/3

        self.init_special()

        if initobj is None:
            return
        #end if

        self.DIM=3

        iname = initobj.__class__.__name__
        self.iname=iname
        if iname==self.__class__.__name__+'Initializer':
            self.init_from_initializer(initobj)
        elif iname==self.__class__.__name__:
            self.init_from_spacegrid(initobj)
        elif iname=='HDFgroup':
            self.init_from_hdfgroup(initobj)
        elif iname=='XMLelement':
            self.init_from_xmlelement(initobj)
        else:
            self.error('Spacegrid cannot be initialized from '+iname)
        #end if
        delvars.append('iname')

        self.check_complete()

        for dv in delvars:
            del self[dv]
        #end for

        self._reset_dynamic_methods()
        self._register_dynamic_methods()
        return
    #end def __init__

    def copy(self,other):
        None
    #end def copy

    def init_special(self):
        None
    #end def init_special

    def init_from_initializer(self,init):
        None
    #end def init_from_initializer

    def init_from_spacegrid(self,init):
        None
    #end def init_from_spacegrid

    def init_from_hdfgroup(self,init):
        #copy all datasets from hdf group
        value_pattern = re.compile('value')
        gmap_pattern  = re.compile(r'gmap\d*')
        for k,v in init.items():
            exclude = k[0]=='_' or gmap_pattern.match(k) or value_pattern.match(k)
            if not exclude:
                self[k]=v
            #end if
        #end for

        #convert 1x and 1x1 numpy arrays to just numbers
        #convert Nx1 and 1xN numpy arrays to Nx arrays
        array_type = type(array([]))
        exclude = set(['value','value_squared'])
        for k,v in self.items():
            if k[0]!='_' and type(v)==array_type and k not in exclude:
                sh=v.shape
                ndim = len(sh)
                if ndim==1 and sh[0]==1:
                    self[k]=v[0]
                elif ndim==2:
                    if sh[0]==1 and sh[1]==1:
                        self[k]=v[0,0]
                    elif sh[0]==1 or sh[1]==1:
                        self[k]=v.reshape((sh[0]*sh[1],))
                    #end if
                #end if
            #end if
        #end for

        #set coord string
        self.coord = SpaceGridBase.coord_n2s[self.coordinate]

        #determine if chempot grid
        chempot = 'min_part' in init
        self.chempot = chempot
        if chempot:
            npvalues = self.max_part-self.min_part+1
            self.npvalues = npvalues
        #end if

        #process the data in hdf value,value_squared
        nbe      = self.nblocks_exclude
        nquant   = self.nvalues_per_domain
        ndomains = self.ndomains
        nblocks,ntmp = init.value.shape
        self.nblocks = nblocks

        if not chempot:
            value = init.value.reshape(nblocks,ndomains,nquant).transpose(2,1,0)
        else:
            value = init.value.reshape(nblocks,ndomains,npvalues,nquant).transpose(3,2,1,0)
        #end if
        value = value[...,nbe:]

        (mean,var,error,kappa)=simstats(value)
        quants = ['D','T','V']
        iD = -1
        iT = -1
        iV = -1
        for i in range(len(quants)):
            q=quants[i]
            self[q].mean  = mean[i,...]
            self[q].error = error[i,...]
            if q=='D':
                iD = i
            elif q=='T':
                iT = i
            elif q=='V':
                iV = i
            else:
                self.error('quantity "{0}" not recognized'.format(q))
            #end if
        #end for

        E = value[iT,...]+value[iV,...]
        (mean,var,error,kappa)=simstats(E)
        self.E.mean  = mean
        self.E.error = error

        P = 2./3.*value[iT,...]+1./3.*value[iV,...]
        (mean,var,error,kappa)=simstats(P)
        self.P.mean  = mean
        self.P.error = error

        #convert all quantities into true densities
        ovol = 1./self.domain_volumes
        sqovol = sqrt(ovol)
        for q in SpaceGridBase.quantities:
            self[q].mean  *= ovol
            self[q].error *= sqovol
        #end for

        #keep original data, if requested
        if self.keep_data:
            self.data = QAobject()
            for i in range(len(quants)):
                q=quants[i]
                self.data[q] = value[i,...]
            #end for
            self.data.E = E
            self.data.P = P
        #end if

        return
    #end def init_from_hdfgroup
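
    # Worked sketch of the combinations formed above from the kinetic (T) and
    # potential (V) channels: per block, E = T + V and P = (2*T + V)/3, each
    # reduced with simstats along the block axis before the density conversion.
    # A block with T=0.6 and V=-1.0, for example, contributes E=-0.4 and
    # P=(1.2-1.0)/3~0.067 to the averages.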

    def init_from_xmlelement(self,init):
        None
    #end def init_from_xmlelement

    def check_complete(self,exit_on_fail=True):
        succeeded = True
        for k,v in self.items():
            if k[0]!='_' and v is None:
                succeeded=False
                if exit_on_fail:
                    self.error('SpaceGridBase.'+k+' must be provided',exit=False)
                #end if
            #end if
        #end for
        if not succeeded:
            self.error('SpaceGrid attempted initialization from '+self.iname,exit=False)
            self.error('SpaceGrid is incomplete',exit=False)
            if exit_on_fail:
                exit()
            #end if
        #end if
        return succeeded
    #end def check_complete

    def _reset_dynamic_methods(self):
        None
    #end def _reset_dynamic_methods

    def _unset_dynamic_methods(self):
        None
    #end def _unset_dynamic_methods

    def add_all_attributes(self,o):
        for k,v in o.__dict__.items():
            if not k.startswith('_'):
                vc = copy.deepcopy(v)
                self._add_attribute(k,vc)
            #end if
        #end for
        return
    #end def add_all_attributes

    def reorder_atomic_data(self,imap):
        None
    #end def reorder_atomic_data

    def integrate(self,quantity,domain=None):
        if quantity not in SpaceGridBase.quantities:
            msg = 'requested integration of quantity '+quantity+'\n'
            msg +=' '+quantity+' is not a valid SpaceGrid quantity\n'
            msg +=' valid quantities are:\n'
            msg +=' '+str(SpaceGridBase.quantities)
            self.error(msg)
        #end if
        dv = self.domain_volumes
        if domain is None:
            mean  = (self[quantity].mean*dv).sum()
            error = sqrt((self[quantity].error**2*dv).sum())
        else:
            mean  = (self[quantity].mean[domain]*dv[domain]).sum()
            error = sqrt((self[quantity].error[domain]**2*dv[domain]).sum())
        #end if
        return mean,error
    #end def integrate
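
    # Worked sketch of integrate above (hypothetical two-domain grid): with
    # densities m=(1.0,2.0), errors e=(0.1,0.2), and volumes v=(2.0,3.0),
    #   mean  = 1.0*2.0 + 2.0*3.0         = 8.0
    #   error = sqrt(0.1**2*2 + 0.2**2*3) = sqrt(0.14) ~ 0.374
    # i.e. means sum as density*volume while errors combine in quadrature with
    # the same volume weighting applied when the densities were formed.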

    def integrate_data(self,quantity,*domains,**kwargs):
        return_list = False
        if 'domains' in kwargs:
            domains = kwargs['domains']
            return_list = True
        #end if
        if 'return_list' in kwargs:
            return_list = kwargs['return_list']
        #end if
        if quantity not in SpaceGridBase.quantities:
            msg = 'requested integration of quantity '+quantity+'\n'
            msg +=' '+quantity+' is not a valid SpaceGrid quantity\n'
            msg +=' valid quantities are:\n'
            msg +=' '+str(SpaceGridBase.quantities)
            self.error(msg)
        #end if
        q = self.data[quantity]
        results = list()
        nblocks = q.shape[-1]
        qi = zeros((nblocks,))
        if len(domains)==0:
            for b in range(nblocks):
                qi[b] = q[...,b].sum()
            #end for
            (mean,var,error,kappa)=simstats(qi)
        else:
            for domain in domains:
                for b in range(nblocks):
                    qb = q[...,b]
                    qi[b] = qb[domain].sum()
                #end for
                (mean,var,error,kappa)=simstats(qi)
                res = QAobject()
                res.mean  = mean
                res.error = error
                res.data  = qi.copy()
                results.append(res)
            #end for
        #end if
        if len(domains)<2:
            return mean,error
        else:
            if not return_list:
                return tuple(results)
            else:
                means  = list()
                errors = list()
                for res in results:
                    means.append(res.mean)
                    errors.append(res.error)
                #end for
                return means,errors
            #end if
        #end if
    #end def integrate_data
#end class SpaceGridBase


class RectilinearGridInitializer(SpaceGridInitializer):
    def __init__(self):
        SpaceGridInitializer.__init__(self)
        self.origin  = None # 3x1 array
        self.axes    = None # 3x3 array
        self.axlabel = None # 3x1 string list
        self.axgrid  = None # 3x1 string list
    #end def __init__
#end class RectilinearGridInitializer


class RectilinearGrid(SpaceGridBase):
    def __init__(self,initobj=None,options=None):
        SpaceGridBase.__init__(self,initobj,options)
        return
    #end def __init__

    def init_special(self):
        self.origin         = None # 3x1 array
        self.axes           = None # 3x3 array
        self.axlabel        = None # 3x1 string list
        self.axinv          = None
        self.volume         = None
        self.dimensions     = None
        self.gmap           = None
        self.umin           = None
        self.umax           = None
        self.odu            = None
        self.dm             = None
        self.domain_uwidths = None
        return
    #end def init_special

    def copy(self):
        return RectilinearGrid(self)
    #end def copy

    def _reset_dynamic_methods(self):
        p2d=[self.points2domains_cartesian, \
             self.points2domains_cylindrical, \
             self.points2domains_spherical]
        self.points2domains = p2d[self.coordinate]

        p2u=[self.point2unit_cartesian, \
             self.point2unit_cylindrical, \
             self.point2unit_spherical]
        self.point2unit = p2u[self.coordinate]
        return
    #end def _reset_dynamic_methods

    def _unset_dynamic_methods(self):
        self.points2domains = None
        self.point2unit     = None
        return
    #end def _unset_dynamic_methods

    def init_from_initializer(self,init):
        init.check_complete()
        for k,v in init.items():
            if k[0]!='_':
                self[k]=v
            #end if
        #end for
        self.initialize()
        return
    #end def init_from_initializer

    def init_from_spacegrid(self,init):
        for q in SpaceGridBase.quantities:
            self[q].mean  = init[q].mean.copy()
            self[q].error = init[q].error.copy()
        #end for
        array_type = type(array([1]))
        exclude = set(['point2unit','points2domains','points'])
        for k,v in init.items():
            if k[0]!='_':
                vtype = type(v)
                if k in SpaceGridBase.quantities:
                    self[k].mean  = v.mean.copy()
                    self[k].error = v.error.copy()
                elif vtype==array_type:
                    self[k] = v.copy()
                elif vtype==HDFgroup:
                    self[k] = v
                elif k in exclude:
                    None
                else:
                    self[k] = vtype(v)
                #end if
            #end if
        #end for
        self.points = init.points
        return
    #end def init_from_spacegrid

    def init_from_hdfgroup(self,init):
        SpaceGridBase.init_from_hdfgroup(self,init)
        self.gmap=[init.gmap1,init.gmap2,init.gmap3]
        #set axlabel strings
        self.axlabel=list()
        for d in range(self.DIM):
            label = SpaceGridBase.axlabel_n2s[self.axtypes[d]]
            self.axlabel.append(label)
        #end for
        del self.axtypes
        for i in range(len(self.gmap)):
            self.gmap[i]=self.gmap[i].reshape((len(self.gmap[i]),))
        #end for
        return
    #end def init_from_hdfgroup

    def init_from_xmlelement(self,init):
        DIM=self.DIM
        self.axlabel=list()
        self.axgrid =list()
        #coord
        self.coord = init.coord
        #origin
        p1 = self.points[init.origin.p1]
        if 'p2' in init.origin:
            p2 = self.points[init.origin.p2]
        else:
            p2 = self.points['zero']
        #end if
        if 'fraction' in init.origin:
            frac = eval(init.origin.fraction)
        else:
            frac = 0.0
        #end if
        self.origin = p1 + frac*(p2-p1)
        #axes
        self.axes = zeros((DIM,DIM))
        for d in range(DIM):
            # attribute lookup by name replaces the former exec-based access
            axis = getattr(init,'axis'+str(d+1))
            p1 = self.points[axis.p1]
            if 'p2' in axis:
                p2 = self.points[axis.p2]
            else:
                p2 = self.points['zero']
            #end if
            if 'scale' in axis:
                scale = eval(axis.scale)
            else:
                scale = 1.0
            #end if
            for dd in range(DIM):
                self.axes[dd,d] = scale*(p1[dd]-p2[dd])
            #end for
            self.axlabel.append(axis.label)
            self.axgrid.append(axis.grid)
        #end for
        self.initialize()
        return
    #end def init_from_xmlelement

    def initialize(self): #like qmcpack SpaceGridBase.initialize
        write=False
        succeeded=True

        ndomains=-1

        DIM = self.DIM

        coord   = self.coord
        origin  = self.origin
        axes    = self.axes
        axlabel = self.axlabel
        axgrid  = self.axgrid
        del self.axgrid

        ax_cartesian   = ["x" , "y"   , "z"    ]
        ax_cylindrical = ["r" , "phi" , "z"    ]
        ax_spherical   = ["r" , "phi" , "theta"]

        cmap = dict()
        if(coord=="cartesian"):
            for d in range(DIM):
                cmap[ax_cartesian[d]]=d
                axlabel[d]=ax_cartesian[d]
            #end for
        elif(coord=="cylindrical"):
            for d in range(DIM):
                cmap[ax_cylindrical[d]]=d
                axlabel[d]=ax_cylindrical[d]
            #end for
        elif(coord=="spherical"):
            for d in range(DIM):
                cmap[ax_spherical[d]]=d
                axlabel[d]=ax_spherical[d]
            #end for
        else:
            self.error(' Coordinate supplied to spacegrid must be cartesian, cylindrical, or spherical\n You provided '+coord,exit=False)
            succeeded=False
        #end if
        self.coordinate = SpaceGridBase.coord_s2n[self.coord]
        coordinate = self.coordinate

        #loop over spacegrid xml elements
        naxes =DIM
        # variables for loop
        utol = 1e-5
        dimensions=zeros((DIM,),dtype=int)
        umin=zeros((DIM,))
        umax=zeros((DIM,))
        odu=zeros((DIM,))
        ndu_per_interval=[None,None,None]
        gmap=[None,None,None]
        for dd in range(DIM):
            iaxis = cmap[axlabel[dd]]
            grid  = axgrid[dd]
            #read in the grid contents
            # remove spaces inside of parentheses
            inparen=False
            gtmp=''
            for gc in grid:
                if(gc=='('):
                    inparen=True
                    gtmp+=' '
                #end if
                if(not(inparen and gc==' ')):
                    gtmp+=gc
                #end if
                if(gc==')'):
                    inparen=False
                    gtmp+=' '
                #end if
            #end for
            grid=gtmp
            # break into tokens
            tokens = grid.split()
            if(write):
                print("      grid   = ",grid)
                print("      tokens = ",tokens)
            #end if
            # count the number of intervals
            nintervals=0
            for t in tokens:
                if t[0]!='(':
                    nintervals+=1
                #end if
            #end for
            nintervals-=1
            if(write):
                print("      nintervals = ",nintervals)
            #end if
            # allocate temporary interval variables
            ndom_int = zeros((nintervals,),dtype=int)
            du_int   = zeros((nintervals,))
            ndu_int  = zeros((nintervals,),dtype=int)
            # determine number of domains in each interval and the width of each domain
            u1=1.0*eval(tokens[0])
            umin[iaxis]=u1
            if(abs(u1)>1.0000001):
                self.error(' interval endpoints cannot be greater than 1\n endpoint provided: '+str(u1),exit=False)
                succeeded=False
            #end if
            is_int=False
            has_paren_val=False
            interval=-1
            for i in range(1,len(tokens)):
                if not tokens[i].startswith('('):
                    u2=1.0*eval(tokens[i])
                    umax[iaxis]=u2
                    if(not has_paren_val):
                        du_i=u2-u1
                    #end if
                    has_paren_val=False
                    interval+=1
                    if(write):
                        print("      parsing interval ",interval," of ",nintervals)
                        print("      u1,u2 = ",u1,",",u2)
                    #end if
                    if(u2<u1):
                        self.error(' interval ('+str(u1)+','+str(u2)+') is negative',exit=False)
                        succeeded=False
                    #end if
                    if(abs(u2)>1.0000001):
                        self.error(' interval endpoints cannot be greater than 1\n endpoint provided: '+str(u2),exit=False)
                        succeeded=False
                    #end if
                    if(is_int):
                        du_int[interval]=(u2-u1)/ndom_i
                        ndom_int[interval]=ndom_i
                    else:
                        du_int[interval]=du_i
                        ndom_int[interval]=floor((u2-u1)/du_i+.5)
                        if(abs(u2-u1-du_i*ndom_int[interval])>utol):
                            self.error(' interval ('+str(u1)+','+str(u2)+') not divisible by du='+str(du_i),exit=False)
                            succeeded=False
                        #end if
                    #end if
                    u1=u2
                else:
                    has_paren_val=True
                    paren_val=tokens[i][1:len(tokens[i])-1]
                    if(write):
                        print("      interval spacer = ",paren_val)
                    #end if
                    is_int=tokens[i].find(".")==-1
                    if(is_int):
                        ndom_i = eval(paren_val)
                        du_i   = -1.0
                    else:
                        ndom_i = 0
                        du_i   = eval(paren_val)
                    #end if
                #end if
            #end for
            # find the smallest domain width
            du_min=min(du_int)
            odu[iaxis]=1.0/du_min
            # make sure it divides into all other domain widths
            for i in range(len(du_int)):
                ndu_int[i]=floor(du_int[i]/du_min+.5)
                if(abs(du_int[i]-ndu_int[i]*du_min)>utol):
                    self.error('interval {0} of axis {1} is not divisible by smallest subinterval {2}'.format(i+1,iaxis+1,du_min),exit=False)
                    succeeded=False
                #end if
            #end for

            if(write):
                print("      interval breakdown")
                print("        interval,ndomains,nsubdomains_per_domain")
                for i in range(len(ndom_int)):
                    print("        ",i,",",ndom_int[i],",",ndu_int[i])
                #end for
            #end if

            # set up the interval map such that gmap[u/du]==domain index
            # (the fine-cell count is cast to int so zeros gets an integer shape)
            gmap[iaxis] = zeros((int(floor((umax[iaxis]-umin[iaxis])*odu[iaxis]+.5)),),dtype=int)
            n=0
            nd=-1
            if(write):
                print("        i,j,k    ax,n,nd  ")
            #end if
            for i in range(len(ndom_int)):
                for j in range(ndom_int[i]):
                    nd+=1
                    for k in range(ndu_int[i]):
                        gmap[iaxis][n]=nd
                        if(write):
                            print("        ",i,",",j,",",k,"  ",iaxis,",",n,",",nd)
                        #end if
                        n+=1
                    #end for
                #end for
            #end for
            dimensions[iaxis]=nd+1
            #end read in the grid contents

            #save interval width information
            ndom_tot=sum(ndom_int)
            ndu_per_interval[iaxis] = zeros((ndom_tot,),dtype=int)
            idom=0
            for i in range(len(ndom_int)):
                for ii in range(ndom_int[i]):
                    ndu_per_interval[iaxis][idom] = ndu_int[i]
                    idom+=1
                #end for
            #end for
        #end for

        axinv = inv(axes)

        #check that all axis grid values fall in the allowed intervals
        cartmap = dict()
        for d in range(DIM):
            cartmap[ax_cartesian[d]]=d
        #end for
        for d in range(DIM):
            if axlabel[d] in cartmap:
                if(umin[d]<-1.0 or umax[d]>1.0):
                    self.error(' grid values for {0} must fall in [-1,1]\n'.format(axlabel[d])+' interval provided: [{0},{1}]'.format(umin[d],umax[d]),exit=False)
                    succeeded=False
                #end if
            elif(axlabel[d]=="phi"):
                if(abs(umin[d])+abs(umax[d])>1.0):
                    self.error(' phi interval cannot be longer than 1\n interval length provided: {0}'.format(abs(umin[d])+abs(umax[d])),exit=False)
                    succeeded=False
                #end if
            else:
                if(umin[d]<0.0 or umax[d]>1.0):
                    self.error(' grid values for {0} must fall in [0,1]\n'.format(axlabel[d])+' interval provided: [{0},{1}]'.format(umin[d],umax[d]),exit=False)
                    succeeded=False
                #end if
            #end if
        #end for

        #set grid dimensions
        # C/Python style indexing
        dm=array([0,0,0],dtype=int)
        dm[0] = dimensions[1]*dimensions[2]
        dm[1] = dimensions[2]
        dm[2] = 1

        ndomains=prod(dimensions)

        volume = abs(det(axes))*8.0 #axes span only one octant

        #compute domain volumes, centers, and widths
        domain_volumes = zeros((ndomains,))
        domain_centers = zeros((ndomains,DIM))
        domain_uwidths = zeros((ndomains,DIM))
        interval_centers = [None,None,None]
        interval_widths  = [None,None,None]
        for d in range(DIM):
            nintervals = len(ndu_per_interval[d])
            interval_centers[d] = zeros((nintervals,))
            interval_widths[d]  = zeros((nintervals,))
            interval_widths[d][0]=ndu_per_interval[d][0]/odu[d]
            interval_centers[d][0]=interval_widths[d][0]/2.0+umin[d]
            for i in range(1,nintervals):
                interval_widths[d][i]  = ndu_per_interval[d][i]/odu[d]
                interval_centers[d][i] = interval_centers[d][i-1] \
                    +.5*(interval_widths[d][i]+interval_widths[d][i-1])
            #end for
        #end for
        du,uc,ubc,rc = zeros((DIM,)),zeros((DIM,)),zeros((DIM,)),zeros((DIM,))
        vol = -1e99
        vol_tot=0.0
        vscale = abs(det(axes))

        for i in range(dimensions[0]):
            for j in range(dimensions[1]):
                for k in range(dimensions[2]):
                    idomain = dm[0]*i + dm[1]*j + dm[2]*k
                    du[0] = interval_widths[0][i]
                    du[1] = interval_widths[1][j]
                    du[2] = interval_widths[2][k]
                    uc[0] = interval_centers[0][i]
                    uc[1] = interval_centers[1][j]
                    uc[2] = interval_centers[2][k]

                    if(coordinate==SpaceGridBase.cartesian):
                        vol=du[0]*du[1]*du[2]
                        ubc=uc
                    elif(coordinate==SpaceGridBase.cylindrical):
                        uc[1]=2.0*pi*uc[1]-pi
                        du[1]=2.0*pi*du[1]
                        vol=uc[0]*du[0]*du[1]*du[2]
                        ubc[0]=uc[0]*cos(uc[1])
                        ubc[1]=uc[0]*sin(uc[1])
                        ubc[2]=uc[2]
                    elif(coordinate==SpaceGridBase.spherical):
                        uc[1]=2.0*pi*uc[1]-pi
                        du[1]=2.0*pi*du[1]
                        uc[2]=    pi*uc[2]
                        du[2]=    pi*du[2]
                        vol=(uc[0]*uc[0]+du[0]*du[0]/12.0)*du[0] \
                            *du[1]                               \
                            *2.0*sin(uc[2])*sin(.5*du[2])
                        ubc[0]=uc[0]*sin(uc[2])*cos(uc[1])
                        ubc[1]=uc[0]*sin(uc[2])*sin(uc[1])
                        ubc[2]=uc[0]*cos(uc[2])
                    #end if
                    vol*=vscale

                    vol_tot+=vol

                    rc = dot(axes,ubc) + origin

                    domain_volumes[idomain] = vol
                    for d in range(DIM):
                        domain_uwidths[idomain,d] = du[d]
                        domain_centers[idomain,d] = rc[d]
                    #end for
                #end for
            #end for
        #end for

        #find the actual volume of the grid
        du = umax-umin
        uc = .5*(umax+umin)
        if coordinate==SpaceGridBase.cartesian:
            vol=du[0]*du[1]*du[2]
        elif coordinate==SpaceGridBase.cylindrical:
            uc[1]=2.0*pi*uc[1]-pi
            du[1]=2.0*pi*du[1]
            vol=uc[0]*du[0]*du[1]*du[2]
        elif coordinate==SpaceGridBase.spherical:
            uc[1]=2.0*pi*uc[1]-pi
            du[1]=2.0*pi*du[1]
            uc[2]=    pi*uc[2]
            du[2]=    pi*du[2]
            vol=(uc[0]*uc[0]+du[0]*du[0]/12.0)*du[0]*du[1]*2.0*sin(uc[2])*sin(.5*du[2])
        #end if
        volume = vol*abs(det(axes))

        for q in SpaceGridBase.quantities:
            self[q].mean  = zeros((ndomains,))
            self[q].error = zeros((ndomains,))
        #end for

        #save the results
        self.axinv          = axinv
        self.volume         = volume
        self.gmap           = gmap
        self.umin           = umin
        self.umax           = umax
        self.odu            = odu
        self.dm             = dm
        self.dimensions     = dimensions
        self.ndomains       = ndomains
        self.domain_volumes = domain_volumes
        self.domain_centers = domain_centers
        self.domain_uwidths = domain_uwidths

        #succeeded = succeeded and check_grid()

        if(self.init_exit_fail and not succeeded):
            self.error(' in def initialize')
        #end if

        return succeeded
    #end def initialize
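
    # Worked sketch of the axis grid strings parsed by initialize above
    # (hypothetical input): grid = "-1 (0.1) 0 (20) 1" spans u in [-1,1], with
    # the first interval cut into domains of width du=0.1 (10 domains) and the
    # second into 20 equal domains (du=0.05); du_min=0.05 gives odu=20, so each
    # of the first 10 domains covers 2 fine cells in gmap and the rest cover 1.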

    def point2unit_cartesian(self,point):
        u = dot(self.axinv,(point-self.origin))
        return u
    #end def point2unit_cartesian

    def point2unit_cylindrical(self,point):
        ub = dot(self.axinv,(point-self.origin))
        u=zeros((self.DIM,))
        u[0] = sqrt(ub[0]*ub[0]+ub[1]*ub[1])
        u[1] = atan2(ub[1],ub[0])*o2pi+.5
        u[2] = ub[2]
        return u
    #end def point2unit_cylindrical

    def point2unit_spherical(self,point):
        ub = dot(self.axinv,(point-self.origin))
        u=zeros((self.DIM,))
        u[0] = sqrt(ub[0]*ub[0]+ub[1]*ub[1]+ub[2]*ub[2])
        u[1] = atan2(ub[1],ub[0])*o2pi+.5
        u[2] = acos(ub[2]/u[0])*o2pi*2.0
        return u
    #end def point2unit_spherical
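
    # Worked sketch of the spherical mapping above (hypothetical point, identity
    # axes, origin at zero): the point (0,0,1) gives ub=(0,0,1), so
    #   u[0] = sqrt(1)            = 1.0
    #   u[1] = atan2(0,0)*o2pi+.5 = 0.5
    #   u[2] = acos(1/1)*o2pi*2.0 = 0.0
    # i.e. unit radius 1, azimuth at mid-interval, and polar angle 0.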

    def points2domains_cartesian(self,points,domains,points_outside):
        u  = zeros((self.DIM,))
        iu = zeros((self.DIM,),dtype=int)
        ndomains=-1
        npoints,ndim = points.shape
        for p in range(npoints):
            u = dot(self.axinv,(points[p]-self.origin))
            if (u>self.umin).all() and (u<self.umax).all():
                points_outside[p]=False
                # cast to int so the fine-cell indices can index gmap
                iu=floor( (u-self.umin)*self.odu ).astype(int)
                iu[0] = self.gmap[0][iu[0]]
                iu[1] = self.gmap[1][iu[1]]
                iu[2] = self.gmap[2][iu[2]]
                ndomains+=1
                domains[ndomains,0] = p
                domains[ndomains,1] = dot(self.dm,iu)
            #end if
        #end for
        ndomains+=1
        return ndomains
    #end def points2domains_cartesian

    def points2domains_cylindrical(self,points,domains,points_outside):
        u  = zeros((self.DIM,))
        iu = zeros((self.DIM,),dtype=int)
        ndomains=-1
        npoints,ndim = points.shape
        for p in range(npoints):
            ub = dot(self.axinv,(points[p]-self.origin))
            u[0] = sqrt(ub[0]*ub[0]+ub[1]*ub[1])
            u[1] = atan2(ub[1],ub[0])*o2pi+.5
            u[2] = ub[2]
            if (u>self.umin).all() and (u<self.umax).all():
                points_outside[p]=False
                # cast to int so the fine-cell indices can index gmap
                iu=floor( (u-self.umin)*self.odu ).astype(int)
                iu[0] = self.gmap[0][iu[0]]
                iu[1] = self.gmap[1][iu[1]]
                iu[2] = self.gmap[2][iu[2]]
                ndomains+=1
                domains[ndomains,0] = p
                domains[ndomains,1] = dot(self.dm,iu)
            #end if
        #end for
        ndomains+=1
        return ndomains
    #end def points2domains_cylindrical

    def points2domains_spherical(self,points,domains,points_outside):
        u  = zeros((self.DIM,))
        iu = zeros((self.DIM,),dtype=int)
        ndomains=-1
        npoints,ndim = points.shape
        for p in range(npoints):
            ub = dot(self.axinv,(points[p]-self.origin))
            u[0] = sqrt(ub[0]*ub[0]+ub[1]*ub[1]+ub[2]*ub[2])
            u[1] = atan2(ub[1],ub[0])*o2pi+.5
            u[2] = acos(ub[2]/u[0])*o2pi*2.0
            if (u>self.umin).all() and (u<self.umax).all():
                points_outside[p]=False
                # cast to int so the fine-cell indices can index gmap
                iu=floor( (u-self.umin)*self.odu ).astype(int)
                iu[0] = self.gmap[0][iu[0]]
                iu[1] = self.gmap[1][iu[1]]
                iu[2] = self.gmap[2][iu[2]]
                ndomains+=1
                domains[ndomains,0] = p
                domains[ndomains,1] = dot(self.dm,iu)
            #end if
        #end for
        ndomains+=1
        return ndomains
    #end def points2domains_spherical

    def shift_origin(self,shift):
        self.origin += shift
        for i in range(self.domain_centers.shape[0]):
            self.domain_centers[i,:] += shift
        #end for
        return
    #end def shift_origin

    def set_origin(self,origin):
        self.shift_origin(origin-self.origin)
        return
    #end def set_origin

    def interpolate_across(self,quantities,spacegrids,outside,integration=False,warn=False):
        #if the grid is to be used for integration confirm that domains
        # of this spacegrid subdivide source spacegrid domains
        if integration:
            #setup checking variables
            am_cartesian   = self.coordinate==SpaceGridBase.cartesian
            am_cylindrical = self.coordinate==SpaceGridBase.cylindrical
            am_spherical   = self.coordinate==SpaceGridBase.spherical
            fine_interval_centers = [None,None,None]
            fine_interval_domains = [None,None,None]
            for d in range(self.DIM):
                ndu = int(round( (self.umax[d]-self.umin[d])*self.odu[d] ))
                if len(self.gmap[d])!=ndu:
                    self.error('ndu is different than len(gmap)')
                #end if
                du = 1./self.odu[d]
                # centers of the fine cells along axis d
                fine_interval_centers[d] = self.umin[d] + .5*du + du*arange(ndu)
                fine_interval_domains[d] = zeros((ndu,))
            #end for
            #checks are done on each source spacegrid to determine interpolation compatibility
            for s in spacegrids:
                # all the spacegrids must have the same coordinate system to satisfy this
                if s.coordinate!=self.coordinate:
                    if warn:
                        self.warn('SpaceGrids must have same coordinate for interpolation')
                    #end if
                    return False
                #end if
                # each spacegrid's axes must be an integer multiple of this spacegrid's axes
                # (this ensures that isosurface shapes conform)
                tile = dot(self.axinv,s.axes)
                for d in range(self.DIM):
                    if not is_integer(tile[d,d]):
                        if warn:
                            self.warn("source axes must be multiples of interpolant's axes")
                        #end if
                        return False
                    #end if
                #end for
                # origin must be at r=0 for cylindrical or spherical
                uo = self.point2unit(s.origin)
                if am_cylindrical or am_spherical:
                    if uo[0]>1e-6:
                        if warn:
                            self.warn('source origin must lie at interpolant r=0')
                        #end if
                        return False
                    #end if
                #end if
                # fine meshes must align
                # origin must be an integer multiple of smallest dom width
                if am_cylindrical:
                    mdims=[2]
                elif am_cartesian:
                    mdims=[0,1,2]
                else:
                    mdims=[]
                #end if
                for d in mdims:
                    if not is_integer(uo[d]*self.odu[d]):
                        if warn:
                            self.warn('source origin does not lie on interpolant fine mesh')
                        #end if
                        return False
                    #end if
                #end for
                # smallest dom width must be multiple of this smallest dom width
                for d in range(self.DIM):
                    if not is_integer(self.odu[d]/s.odu[d]):
                        if warn:
                            self.warn("smallest source domain width must be a multiple of interpolant's smallest domain width")
                        #end if
                        return False
                    #end if
                #end for
                # each interval along each direction for interpolant must map to only one source interval
                # construct points at each fine interval center of interpolant, run them through source gmap to get interval indices
                for d in range(self.DIM):
                    fine_interval_domains[d][:]=-2
                    gmlen = len(s.gmap[d])
                    for i in range(len(fine_interval_centers[d])):
                        uc = fine_interval_centers[d][i]
                        ind = int(floor((uc-s.umin[d])*s.odu[d]))
                        if ind < gmlen:
                            idom=s.gmap[d][ind]
                        else:
                            idom=-1
                        #end if
                        fine_interval_domains[d][i]=idom
                    #end for
                    cind = self.gmap[d][0]
                    istart = 0
                    iend   = 0
                    for i in range(len(self.gmap[d])):
                        if self.gmap[d][i]==cind:
                            iend+=1
                        else:
                            source_ind = fine_interval_domains[d][istart]
                            for j in range(istart+1,iend):
                                if fine_interval_domains[d][j]!=source_ind:
                                    if warn:
                                        self.warn('an interpolant domain must not fall on multiple source domains')
                                    #end if
                                    return False
                                #end if
                            #end for
                            istart=iend
                        #end if
                    #end for
                #end for
            #end for
        #end if

        #get the list of domains points from this grid fall in
        # and interpolate requested quantities on them
        domain_centers = self.domain_centers
        domind = zeros((self.ndomains,2),dtype=int)
        domout = ones((self.ndomains,) ,dtype=int)
        for s in spacegrids:
            domind[:,:] = -1
            ndomin = s.points2domains(domain_centers,domind,domout)
            for q in quantities:
                self[q].mean[ domind[0:ndomin,0]] = s[q].mean[ domind[0:ndomin,1]].copy()
                self[q].error[domind[0:ndomin,0]] = s[q].error[domind[0:ndomin,1]].copy()
            #end for
        #end for
        for d in range(self.ndomains):
            if domout[d]:
                for q in quantities:
                    self[q].mean[d]  = outside[q].mean
                    self[q].error[d] = outside[q].error
                #end for
            #end if
        #end for
        return True
    #end def interpolate_across

    def interpolate(self,points,quantities=None):
        if quantities is None:
            quantities=SpaceGridBase.quantities
        #end if
        npoints,ndim = points.shape
        ind = empty((npoints,2),dtype=int)
        out = ones((npoints,) ,dtype=int)
        nin = self.points2domains(points,ind,out)
        result = QAobject()
        for q in quantities:
            result._add_attribute(q,QAobject())
            result[q].mean  = zeros((npoints,))
            result[q].error = zeros((npoints,))
            result[q].mean[ ind[0:nin,0]] = self[q].mean[ ind[0:nin,1]].copy()
            result[q].error[ind[0:nin,0]] = self[q].error[ind[0:nin,1]].copy()
        #end for
        return result
    #end def interpolate

    def isosurface(self,quantity,contours=5,origin=None):
        if quantity not in SpaceGridBase.quantities:
            self.error('isosurface requested for invalid quantity '+str(quantity))
        #end if
        dimensions = self.dimensions
        if origin is None:
            points = self.domain_centers
        else:
            npoints,ndim = self.domain_centers.shape
            points = empty((npoints,ndim))
            for i in range(npoints):
                points[i,:] = origin + self.domain_centers[i,:]
            #end for
        #end if
        scalars = self[quantity].mean
        name = quantity
        self.plotter.isosurface(points,scalars,contours,dimensions,name)
        return
    #end def isosurface

    def surface_slice(self,quantity,x,y,z,options=None):
        if quantity not in SpaceGridBase.quantities:
            self.error('surface_slice requested for invalid quantity '+str(quantity))
        #end if
        points = empty( (x.size,self.DIM) )
        points[:,0] = x.ravel()
        points[:,1] = y.ravel()
        points[:,2] = z.ravel()
        val = self.interpolate(points,[quantity])
        scalars = val[quantity].mean
        scalars.shape = x.shape
        self.plotter.surface_slice(x,y,z,scalars,options)
        return
    #end def surface_slice

    def plot_axes(self,color=None,radius=.025,origin=None):
        if color is None:
            color = (0.,0,0)
        #end if
        if origin is None:
            origin = array([0.,0,0])
        #end if
        colors=array([[1.,0,0],[0,1.,0],[0,0,1.]])
        for d in range(self.DIM):
            a=self.axes[:,d]+origin
            ax=array([-a[0],a[0]])
            ay=array([-a[1],a[1]])
            az=array([-a[2],a[2]])
            self.plotter.plot3d(ax,ay,az,tube_radius=radius,color=tuple(colors[:,d]))
        #end for
        return
    #end def plot_axes

    def plot_box(self,color=None,radius=.025,origin=None):
        if color is None:
            color = (0.,0,0)
        #end if
        if origin is None:
            origin = array([0.,0,0])
        #end if
        p = self.points
        p1=p.cmmm+origin
        p2=p.cmpm+origin
        p3=p.cpmm+origin
        p4=p.cppm+origin
        p5=p.cmmp+origin
        p6=p.cmpp+origin
        p7=p.cpmp+origin
        p8=p.cppp+origin
        bline = array([p1,p2,p4,p3,p1,p5,p6,p8,p7,p5,p7,p3,p4,p8,p6,p2])
        self.plotter.plot3d(bline[:,0],bline[:,1],bline[:,2],color=color)
        return
    #end def plot_box
#end class RectilinearGrid
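

# Illustrative sketch of the gmap lookup used by the points2domains_* methods
# above (hypothetical one-dimensional data): with umin=0, odu=4 (du=0.25), and
# gmap=[0,0,1,1], the unit coordinate u=0.6 lands in fine cell
# floor((0.6-0)*4)=2 and therefore in domain gmap[2]=1.
def _example_gmap_lookup():
    gmap0 = array([0,0,1,1])
    u,umin,odu = 0.6,0.0,4.0
    return gmap0[int(floor((u-umin)*odu))]   # -> 1
#end def _example_gmap_lookup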


class VoronoiGridInitializer(SpaceGridInitializer):
    def __init__(self):
        SpaceGridInitializer.__init__(self)
    #end def __init__
#end class VoronoiGridInitializer


class VoronoiGrid(SpaceGridBase):
    def __init__(self,initobj=None,options=None):
        SpaceGridBase.__init__(self,initobj,options)
        return
    #end def __init__

    def copy(self,other):
        return VoronoiGrid(other)
    #end def copy

    def reorder_atomic_data(self,imap):
        for q in self.quantities:
            qv = self[q]
            qv.mean  = qv.mean[...,imap]
            qv.error = qv.error[...,imap]
        #end for
        if 'data' in self:
            data = self.data
            for q in self.quantities:
                data[q] = data[q][...,imap,:]
            #end for
        #end if
    #end def reorder_atomic_data
#end class VoronoiGrid
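

# Illustrative sketch (hypothetical data): reorder_atomic_data above permutes
# the trailing per-atom axis of the Voronoi means with an index map.
def _example_voronoi_reorder():
    means = array([10.,20.,30.])   # one mean per atomic Voronoi domain
    imap  = array([2,0,1])         # hypothetical new atom ordering
    return means[...,imap]         # -> array([30.,10.,20.])
#end def _example_voronoi_reorder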


def SpaceGrid(init,opts=None):
    SpaceGrid.count+=1

    iname = init.__class__.__name__
    if iname=='HDFgroup':
        coordinate = init.coordinate[0]
    else:
        print('SpaceGrid can only infer the coordinate system from an HDFgroup, exiting...')
        exit()
    #end if
    coord = SpaceGrid.coord_n2s[coordinate]

    if coord in SpaceGrid.rect:
        return RectilinearGrid(init,opts)
    elif coord=='voronoi':
        return VoronoiGrid(init,opts)
    else:
        print('SpaceGrid '+coord+' has not been implemented, exiting...')
        exit()
    #end if
#end def SpaceGrid
SpaceGrid.count = 0
SpaceGrid.coord_n2s = SpaceGridBase.coord_n2s
SpaceGrid.rect = set(['cartesian','cylindrical','spherical'])
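

# Illustrative sketch: SpaceGrid dispatches on the coordinate system stored in
# the HDF data, sending rectilinear coordinates to RectilinearGrid and
# 'voronoi' to VoronoiGrid via the name maps attached above.
def _example_spacegrid_dispatch():
    coord = SpaceGrid.coord_n2s[0]   # coordinate index 0 is 'cartesian'
    return coord in SpaceGrid.rect   # -> True
#end def _example_spacegrid_dispatch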