Add DDB methods to check whether Becs and the dielectric tensor are available.

Matteo Giantomassi 2017-12-17 19:31:19 +01:00
parent 0e05e20c35
commit 75eb1491b0
12 changed files with 395 additions and 117 deletions
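
A minimal usage sketch of the new detection API introduced by this commit (the DDB path below is a placeholder):

    from abipy import abilab

    with abilab.abiopen("out_DDB") as ddb:       # "out_DDB" is a placeholder path
        print(ddb.has_qpoint([0, 0, 0]))         # True if Gamma is in the DDB
        print(ddb.has_bec_terms())               # Born effective charge entries at Gamma?
        print(ddb.has_emacro_terms())            # dielectric-tensor (E-field) entries at Gamma?
        print(ddb.has_lo_to_data())              # both of the above => LO-TO splitting can be computed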

.gitignore

@ -1,7 +1,7 @@
# abipy files
abinit_vars.pickle
Profile.prof
abipy/data/runs/flow_*
abipy/examples/flows/flow_*
_debug
__abinb_workdir__
abinb_*.ipynb


@ -601,7 +601,7 @@ def g0w0_convergence_inputs(structure, pseudos, kppa, nscf_nband, ecuteps, ecuts
scf_ksampling = aobj.KSampling.automatic_density(structure, kppa, chksymbreak=0, shifts=(0, 0, 0))
nscf_ksampling = aobj.KSampling.automatic_density(structure, kppa, chksymbreak=0, shifts=(0, 0, 0))
else:
# this is the original behaviour before the devellopment of the gwwrapper
# this is the original behaviour before the development of the gwwrapper
scf_ksampling = aobj.KSampling.automatic_density(structure, kppa, chksymbreak=0)
nscf_ksampling = aobj.KSampling.automatic_density(structure, kppa, chksymbreak=0)
@ -611,7 +611,6 @@ def g0w0_convergence_inputs(structure, pseudos, kppa, nscf_nband, ecuteps, ecuts
charge=charge, nband=max(nscf_nband), fband=None)
multi_scf = MultiDataset(structure, pseudos, ndtset=max(1, len(scf_diffs)))
#print(len(scf_diffs))
multi_scf.set_vars(scf_ksampling.to_abivars())
multi_scf.set_vars(scf_electrons.to_abivars())
@ -658,8 +657,7 @@ def g0w0_convergence_inputs(structure, pseudos, kppa, nscf_nband, ecuteps, ecuts
freqremin=None)
scr_inputs = []
sigma_inputs = []
print("ecuteps", ecuteps, "nscf_nband", nscf_nband)
#print("ecuteps", ecuteps, "nscf_nband", nscf_nband)
for response_model in response_models:
for ecuteps_v in ecuteps:
@ -810,7 +808,7 @@ def scf_phonons_inputs(structure, pseudos, kppa,
# Get the qpoints in the IBZ. Note that here we use a q-mesh with ngkpt=(4,4,4) and shiftk=(0,0,0)
# i.e. the same parameters used for the k-mesh in gs_inp.
qpoints = gs_inp.abiget_ibz(ngkpt=(4,4,4), shiftk=(0,0,0), kptopt=1).points
qpoints = gs_inp.abiget_ibz(ngkpt=(4, 4, 4), shiftk=(0, 0, 0), kptopt=1).points
#print("get_ibz qpoints:", qpoints)
# Build the input files for the q-points in the IBZ.


@ -936,7 +936,6 @@ class Density(_DensityField):
if self.nspinor == 1:
if self.nsppol == 1:
data_dict = {"total": myrhor[0]}
if self.nsppol == 2:
data_dict = {"total": myrhor[0] + myrhor[1], "diff": myrhor[0] - myrhor[1]}


@ -494,7 +494,7 @@ def as_kpoints(obj, lattice, weights=None, names=None):
class Kpoint(SlotPickleMixin):
"""
Class defining one k-point.
Class defining one k-point. This object is immutable and can be used as key in dictionaries
"""
__slots__ = [
@ -570,7 +570,7 @@ class Kpoint(SlotPickleMixin):
"""Set the weight of the k-point."""
self._weight = weight
@property
@lazy_property
def cart_coords(self):
"""Cartesian coordinates of the k-point."""
return self.lattice.get_cartesian_coords(self.frac_coords)
@ -586,7 +586,7 @@ class Kpoint(SlotPickleMixin):
if name is not None and name.startswith("\\"): name = "$" + name + "$"
self._name = name
@property
@lazy_property
def on_border(self):
"""
True if the k-point is on the border of the BZ (lattice translations are taken into account).
@ -599,6 +599,10 @@ class Kpoint(SlotPickleMixin):
return "[%+.3f, %+.3f, %+.3f]" % tuple(self.frac_coords)
def __str__(self):
return self.to_string()
def to_string(self, verbose=0):
"""String representation."""
s = "[%+.3f, %+.3f, %+.3f]" % tuple(self.frac_coords)
if self.name is not None: s += ", name: %s" % self.name
if self._weight is not None: s += ", weight: %.3f" % self.weight
@ -651,7 +655,20 @@ class Kpoint(SlotPickleMixin):
return self.__class__(self.frac_coords.copy(), self.lattice.copy(),
weight=self.weight, name=self.name)
@property
def is_gamma(self, allow_umklapp=False, atol=None):
"""
Return True if this is the gamma point.
Args:
allow_umklapp: True if umklapp G-vectors are allowed.
atol: Absolute tolerance for k-point comparison (used only if umklapp).
"""
if not allow_umklapp:
return np.all(self.frac_coords == 0.0)
else:
return issamek(self.frac_coords, [0, 0, 0], atol=atol)
@lazy_property
def norm(self):
"""Norm of the kpoint."""
return np.sqrt(np.dot(self.cart_coords, self.cart_coords))
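
A sketch of the new is_gamma behaviour; `lattice` is assumed to be a reciprocal lattice and `pgamma` a hypothetical image of Gamma shifted by a reciprocal-lattice vector (cf. the updated unit test below):

    gamma = Kpoint([0, 0, 0], lattice)
    pgamma = Kpoint([1, 0, 0], lattice)          # Gamma + G (hypothetical example)
    assert gamma.is_gamma()
    assert not pgamma.is_gamma()                 # frac_coords are not exactly zero
    assert pgamma.is_gamma(allow_umklapp=True)   # equivalent to Gamma modulo a G-vector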
@ -787,7 +804,11 @@ class KpointList(collections.Sequence):
def to_string(self, func=str, title=None, verbose=0):
"""String representation."""
return "\n".join("%d) %s" % (i, func(kpoint)) for i, kpoint in enumerate(self))
lines = []; app = lines.append
if title is not None: app(marquee(title, mark="="))
lines.extend(["%d) %s" % (i, func(kpoint)) for i, kpoint in enumerate(self)])
return "\n".join(lines)
# Sequence protocol.
def __len__(self):


@ -86,6 +86,12 @@ class TestKpoint(AbipyTest):
X = Kpoint([0.5, 0, 0], lattice)
K = Kpoint([1/3, 1/3, 1/3], lattice)
repr(X); str(X)
assert X.to_string(verbose=2)
assert gamma.is_gamma()
assert not pgamma.is_gamma()
assert pgamma.is_gamma(allow_umklapp=True)
assert not X.is_gamma()
# TODO
#assert np.all(np.array(X) == X.frac_coords)


@ -5,9 +5,11 @@ from __future__ import print_function, division, unicode_literals, absolute_impo
import sys
import os
import tempfile
import itertools
import numpy as np
from collections import OrderedDict
import pandas as pd
from collections import OrderedDict
from six.moves import map, zip, StringIO
from monty.string import marquee
from monty.collections import AttrDict, dict2namedtuple, tree
@ -18,7 +20,7 @@ from abipy.flowtk import NetcdfReader, AnaddbTask
from abipy.core.mixins import TextFile, Has_Structure, NotebookWriter
from abipy.core.symmetries import AbinitSpaceGroup
from abipy.core.structure import Structure
from abipy.core.kpoints import KpointList
from abipy.core.kpoints import KpointList, Kpoint
from abipy.core.tensor import Tensor
from abipy.iotools import ETSF_Reader
from abipy.abio.inputs import AnaddbInput
@ -65,6 +67,17 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
"""
This object provides an interface to the DDB file produced by ABINIT
as well as methods to compute phonon band structures, phonon DOS, thermodynamical properties ...
About the indices (idir, ipert) used by Abinit (Fortran notation):
* idir in [1, 2, 3] gives the direction (usually reduced direction)
* ipert in [1, 2, ..., mpert] where mpert = natom + 6
* ipert in [1, ..., natom] corresponds to atomic perturbations
* ipert = natom + 1 gives d/dk
* ipert = natom + 2 gives the electric field
* ipert = natom + 3 gives the uniaxial stress
* ipert = natom + 4 gives the shear stress.
"""
Error = DdbError
AnaddbError = AnaddbError
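
To make the (idir, ipert) convention documented above concrete, here is a small sketch (not part of the diff) building the perturbation pairs that the new has_emacro_terms/has_bec_terms methods look for, assuming a hypothetical two-atom cell:

    import itertools

    natom = 2                                                  # hypothetical two-atom cell
    # Electric-field perturbation: ipert = natom + 2, idir in {1, 2, 3}.
    efield_perts = list(itertools.product(range(1, 4), [natom + 2]))
    # Atomic displacements: ipert in [1, natom], idir in {1, 2, 3}.
    atom_perts = list(itertools.product(range(1, 4), range(1, natom + 1)))

    # Born-charge entries couple one atomic and one E-field perturbation; their
    # index in computed_dynmat is the tuple (idir1, ipert1, idir2, ipert2).
    bec_index = [ap + ep for ap in atom_perts for ep in efield_perts]
    print(bec_index[:3])   # [(1, 1, 1, 4), (1, 1, 2, 4), (1, 1, 3, 4)]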
@ -102,10 +115,15 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
app("")
app(self.structure.to_string(verbose=verbose, title="Structure"))
app("")
app(self.qpoints.to_string(verbose=verbose, title="Q-points"))
app(self.qpoints.to_string(verbose=verbose, title="Q-points in DDB"))
app("")
app("guessed_ngqpt: %s (guess for the q-mesh divisions made by AbiPy)" % self.guessed_ngqpt)
if verbose > 1:
from pprint import pformat
app(marquee("DDB Header", mark="="))
app(pformat(self.header))
return "\n".join(lines)
@property
@ -160,14 +178,18 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
tokens = line.split()
key = None
try:
float(tokens[0])
parse = float if "." in tokens[0] else int
keyvals[-1][1].extend(list(map(parse, tokens)))
except ValueError:
# We have a new key
key = tokens.pop(0)
parse = float if "." in tokens[0] else int
keyvals.append((key, list(map(parse, tokens))))
try:
float(tokens[0])
parse = float if "." in tokens[0] else int
keyvals[-1][1].extend(list(map(parse, tokens)))
except ValueError:
# We have a new key
key = tokens.pop(0)
parse = float if "." in tokens[0] else int
keyvals.append((key, list(map(parse, tokens))))
except Exception as exc:
raise RuntimeError("Exception:\n%s\nwhile parsing ddb header line:\n%s" %
(str(exc), line))
# add the potential information
for line in self:
@ -184,17 +206,30 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
# to avoid problems with pymatgen routines that expect integral Z
# This of course will break any code for alchemical mixing.
arrays = {
"acell": dict(shape=(3, ), dtype=np.double),
"amu": dict(shape=(h.ntypat, ), dtype=np.double),
"kpt": dict(shape=(h.nkpt, 3), dtype=np.double),
"ngfft": dict(shape=(3, ), dtype=np.int),
#"occ": dict(shape=(h.nsppol, h.nkpt, h.nband), dtype=np.double),
"rprim": dict(shape=(3, 3), dtype=np.double),
"spinat": dict(shape=(h.natom, 3), dtype=np.double),
"symrel": dict(shape=(h.nsym, 3, 3), dtype=np.int),
"tnons": dict(shape=(h.nsym, 3), dtype=np.double),
"xred": dict(shape=(h.natom, 3), dtype=np.double),
"znucl": dict(shape=(-1,), dtype=np.int),
"symafm": dict(shape=(-1,), dtype=np.int),
# In principle these two quantities are double but here we convert to int
# Alchemical mixing is therefore ignored.
"znucl": dict(shape=(h.ntypat,), dtype=np.int),
"zion": dict(shape=(h.ntypat,), dtype=np.int),
"symafm": dict(shape=(h.nsym,), dtype=np.int),
"wtk": dict(shape=(h.nkpt,), dtype=np.double),
}
for k, ainfo in arrays.items():
h[k] = np.reshape(np.array(h[k], dtype=ainfo["dtype"]), ainfo["shape"])
try:
h[k] = np.reshape(np.array(h[k], dtype=ainfo["dtype"]), ainfo["shape"])
except Exception as exc:
print("While Trying to reshape", k)
raise exc
# Transpose symrel because Abinit writes matrices by columns.
h.symrel = np.array([s.T for s in h.symrel])
@ -225,33 +260,64 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
return np.reshape(qpoints, (-1,3))
@lazy_property
def computed_dynmat(self):
"""
OrderedDict mapping each q-point (Kpoint) to a pandas DataFrame with the second-order derivatives.
The DataFrame has columns (idir1, ipert1, idir2, ipert2, cvalue) and is indexed by the tuple
(idir1, ipert1, idir2, ipert2).
"""
# TODO: Create mapping [(idir1, ipert1), (idir2, ipert2)] --> element
df_columns = "idir1 ipert1 idir2 ipert2 cvalue".split()
dynmat = OrderedDict()
for block in self.blocks:
qpt = Kpoint(frac_coords=block["qpt"], lattice=self.structure.reciprocal_lattice, weight=None, name=None)
#print("qtp", qpt)
#print("data", data)
df_rows, df_index = [], []
for line in block["data"]:
line = line.strip()
if line.startswith("2nd derivatives") or line.startswith("qpt"):
continue
#print(line)
toks = line.split()
idir1, ipert1 = p1 = (int(toks[0]), int(toks[1]))
idir2, ipert2 = p2 = (int(toks[2]), int(toks[3]))
toks[4] = toks[4].replace("D", "E")
toks[5] = toks[5].replace("D", "E")
cvalue = float(toks[4]) + 1j*float(toks[5])
df_index.append(p1 + p2)
df_rows.append(dict(idir1=idir1, ipert1=ipert1, idir2=idir2, ipert2=ipert2, cvalue=cvalue))
# TODO: Create mapping [(idir1, ipert1), (idir2, ipert2)] --> element
df = pd.DataFrame(df_rows, index=df_index, columns=df_columns)
dynmat[qpt] = df
return dynmat
@lazy_property
def blocks(self):
"""
DDB blocks. List of dictionaries, Each dictionary contains "qpt"
with the reduced coordinates of the q-point and "data" that is a list of strings
with the entries of the dynamical matrix for this q-point.
DDB blocks. List of dictionaries. Each dictionary contains the following keys:
"qpt" with the reduced coordinates of the q-point.
"data" that is a list of strings with the entries of the dynamical matrix for this q-point.
"""
return self._read_blocks()
def _read_blocks(self):
self.seek(0)
# skip until the beginning of the db
self.seek(0)
while "Number of data blocks" not in self._file.readline():
pass
blocks = []
block_lines = []
qpt = None
for line in self:
# skip empty lines
if line.isspace():
continue
if "List of bloks and their characteristics" in line:
#add last block
# add last block when we reach the last part of the file.
blocks.append({"data": block_lines, "qpt": qpt})
break
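
A short sketch of how the raw blocks and the parsed dynamical matrix above can be inspected, assuming `ddb` is an open DdbFile:

    # ddb.blocks: one dict per DDB block with "qpt" and "data" keys.
    for block in ddb.blocks:
        print("qpt:", block["qpt"], "with", len(block["data"]), "data lines")

    # ddb.computed_dynmat: {qpoint: DataFrame} indexed by (idir1, ipert1, idir2, ipert2).
    for qpt, df in ddb.computed_dynmat.items():
        print(qpt, "->", len(df), "second-derivative entries")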
@ -267,8 +333,6 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
if "qpt" in line:
qpt = list(map(float, line.split()[1:4]))
# TODO: Create mapping [(idir1, ipert1), (idir2, ipert2)] --> element
return blocks
@property
@ -276,6 +340,15 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
""":class:`KpointList` object with the list of q-points in reduced coordinates."""
return self._qpoints
def has_qpoint(self, qpoint):
"""True if the DDB file contains this q-point."""
#qpoint = Kpoint.as_kpoint(qpoint, self.structure.reciprocal_lattice)
try:
self.qpoints.index(qpoint)
return True
except ValueError:
return False
def qindex(self, qpoint):
"""
The index of the q-point in the internal list of q-points.
@ -331,37 +404,72 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
"""Dictionary with the parameters that are usually tested for convergence."""
return {k: v for k, v in self.header.items() if k in ("nkpt", "nsppol", "ecut", "tsmear", "ixc")}
# TODO
# API to understand if the DDB contains the info we are looking for.
# NB: This requires the parsing of the dynamical matrix
#def has_phonon_terms(self, qpoint)
# """True if the DDB file contains info on the phonon perturbation."""
def has_lo_to_data(self, select="at_least_one"):
"""
True if the DDB file contains the data required to compute the LO-TO splitting.
"""
return self.has_emacro_terms(select=select) and self.has_bec_terms(select=select)
#def has_emacro_terms(self, ij="at_least_one"):
# """
# True if the DDB file contains info on the electric-field perturbation.
def has_emacro_terms(self, select="at_least_one"):
"""
True if the DDB file contains info on the electric-field perturbation.
# Args:
# ij: Possible values in ["at_least_one", "all"] or tuple e.g. (0, 1)
# If ij == "at_least_one", we check if there's at least one entry associated to the electric field.
# and we assume that anaddb will be able to reconstruct the full tensor by symmetry.
# If ij == "all", all tensor components must be present in the DDB file.
# If ij == (0, 1), the method returns False if the (0, 1) component of the tensor is not present in the DDB.
# """
Args:
select: Possible values in ["at_least_one", "all"] or tuple e.g. (0, 1)
If select == "at_least_one", we check if there's at least one entry associated to the electric field.
and we assume that anaddb will be able to reconstruct the full tensor by symmetry.
If select == "all", all tensor components must be present in the DDB file.
If select == (0, 1), the method returns False if the (0, 1) component of the tensor is not present in the DDB.
"""
gamma = Kpoint.gamma(self.structure.reciprocal_lattice)
if gamma not in self.computed_dynmat:
return False
#def has_bec_terms(self, ij="at_least_one"):
# """
# True if the DDB file contains info on the Born effective charges.
# By default, we check if there's at least one entry associated to electric field.
# and we assume that anaddb will be able to reconstruct the full tensor by symmetry
index_set = set(self.computed_dynmat[gamma].index)
# Args:
# ij: "at_least_one", "all", (1,2)
# """
natom = len(self.structure)
ep_list = list(itertools.product(range(1, 4), [natom + 2]))
for p1 in ep_list:
for p2 in ep_list:
p12 = p1 + p2
if select == "at_least_one":
if p12 in index_set: return True
elif select == "all":
if p12 not in index_set: return False
else:
raise ValueError("Wrong select %s" % str(select))
#def has_lo_to_data(self)
# """True if the DDB file contains data requires to compute LO-TO splitting."""
# return self.has_bec_terms() and self.has_emacro_terms()
return False
def has_bec_terms(self, select="at_least_one"):
"""
True if the DDB file contains info on the Born effective charges.
By default, we check if there's at least one entry associated to atomic displacement
and electric field.
We assume that anaddb will be able to reconstruct the full tensor by symmetry.
Args:
select: "at_least_one", "all", (1, 2)
"""
gamma = Kpoint.gamma(self.structure.reciprocal_lattice)
if gamma not in self.computed_dynmat:
return False
index_set = set(self.computed_dynmat[gamma].index)
natom = len(self.structure)
ep_list = list(itertools.product(range(1, 4), [natom + 2]))
ap_list = list(itertools.product(range(1, 4), range(1, natom + 1)))
for ap1 in ap_list:
for ep2 in ep_list:
p12 = ap1 + ep2
if select == "at_least_one":
if p12 in index_set: return True
elif select == "all":
if p12 not in index_set: return False
else:
raise ValueError("Wrong select %s" % str(select))
return False
def anaget_phmodes_at_qpoint(self, qpoint=None, asr=2, chneut=1, dipdip=1, workdir=None, mpi_procs=1,
manager=None, verbose=0, lo_to_splitting=False, directions=None, anaddb_kwargs=None):
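
The select argument controls how strict these checks are; a short sketch, assuming `ddb` is an open DdbFile:

    # At least one electric-field entry at Gamma: anaddb can rebuild the full tensor by symmetry.
    assume_symmetric = ddb.has_emacro_terms(select="at_least_one")
    # Require every (atomic displacement, E-field) component to be present in the DDB.
    complete_becs = ddb.has_bec_terms(select="all")
    # Dielectric tensor and Becs both available => LO-TO splitting can be computed.
    lo_to = ddb.has_lo_to_data()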
@ -397,7 +505,7 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
raise ValueError("input qpoint %s not in %s.\nddb.qpoints:\n%s" % (
qpoint, self.filepath, self.qpoints))
#if lo_to_splitting and not self.has_lo_to_data:
#if lo_to_splitting and qpoint.is_gamma() and not self.has_lo_to_data():
# cprint("lo_to_splitting set to True but Emacro and Becs are not available in DDB:" % self.filepath)
inp = AnaddbInput.modes_at_qpoint(self.structure, qpoint, asr=asr, chneut=chneut, dipdip=dipdip,
@ -422,12 +530,6 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
return ncfile.phbands
#def anaget_phbst_file(self, ngqpt=None, ndivsm=20, asr=2, chneut=1, dipdip=1,
# workdir=None, manager=None, verbose=0, **kwargs):
#def anaget_phdos_file(self, ngqpt=None, nqsmall=10, asr=2, chneut=1, dipdip=1, dos_method="tetra"
# workdir=None, manager=None, verbose=0, **kwargs):
def anaget_phbst_and_phdos_files(self, nqsmall=10, ndivsm=20, asr=2, chneut=1, dipdip=1, dos_method="tetra",
lo_to_splitting=False, ngqpt=None, qptbounds=None, anaddb_kwargs=None, verbose=0,
mpi_procs=1, workdir=None, manager=None):
@ -457,8 +559,8 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
"""
if ngqpt is None: ngqpt = self.guessed_ngqpt
#if lo_to_splitting and not self.has_lo_to_data:
# cprint("lo_to_splitting set to True but Emacro and Becs are not available in DDB:" % self.filepath)
if lo_to_splitting and not self.has_lo_to_data():
cprint("lo_to_splitting set to True but Emacro and Becs are not available in DDB:" % self.filepath, "yellow")
inp = AnaddbInput.phbands_and_dos(
self.structure, ngqpt=ngqpt, ndivsm=ndivsm, nqsmall=nqsmall, q1shft=(0, 0, 0), qptbounds=qptbounds,
@ -511,7 +613,6 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
if num_cpus <= 0: num_cpus = 1
num_cpus = min(num_cpus, len(nqsmalls))
# TODO: anaget_phdos
def do_work(nqsmall):
_, phdos_file = self.anaget_phbst_and_phdos_files(
nqsmall=nqsmall, ndivsm=1, asr=asr, chneut=chneut, dipdip=dipdip, dos_method=dos_method, ngqpt=ngqpt)
@ -580,6 +681,9 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
Return:
emacro, becs
"""
if not self.has_lo_to_data():
cprint("Dielectric tensor and Becs are not available in DDB: %s" % self.filepath, "yellow")
inp = AnaddbInput(self.structure, anaddb_kwargs={"chneut": chneut})
task = AnaddbTask.temp_shell_task(inp, ddb_node=self.filepath, mpi_procs=mpi_procs, workdir=workdir, manager=manager)
@ -598,6 +702,7 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
with ETSF_Reader(os.path.join(task.workdir, "anaddb.nc")) as r:
structure = r.read_structure()
# TODO Replace with pymatgen tensors
emacro = Tensor.from_cartesian_tensor(r.read_value("emacro_cart"), structure.lattice, space="r")
becs = Becs(r.read_value("becs_cart"), structure, chneut=inp["chneut"], order="f")
@ -1281,7 +1386,7 @@ class DdbRobot(Robot):
if funcs is not None: d.update(self._exec_funcs(funcs, ddb))
rows.append(d)
import pandas as pd
row_names = row_names if not abspath else self._to_relpaths(row_names)
return pd.DataFrame(rows, index=row_names, columns=list(rows[0].keys()))


@ -3598,7 +3598,6 @@ def open_file_phononwebsite(filename, port=8000,
print('Press Ctrl+C to terminate HTTP server')
import webbrowser
webbrowser.get(browser).open_new(url)
#webbrowser.open_new(url)
# Quit application when SIGINT is received
import sys


@ -17,7 +17,7 @@ class AnaddbNcFileTest(AbipyTest):
with AnaddbNcFile(anaddbnc_fname) as anc:
repr(anc); str(anc)
anc.to_string(verbose=1)
anc.to_string(verbose=2)
assert anc.structure.formula == "Al1 As1"
assert anc.becs is not None
assert anc.emacro is not None


@ -45,6 +45,9 @@ class DdbTest(AbipyTest):
assert struct.formula == "Al1 As1"
# Test update header
# FIXME Disabled after my changes in ddb.header
# The implementation of update_header should be checked again.
"""
h_copy = ddb.header.copy()
ddb.update_header()
for k, v in ddb.header.items():
@ -56,6 +59,7 @@ class DdbTest(AbipyTest):
# err += 1
# print("line1", line1, "\nline2, line2)
#assert err == 0
"""
# Test interface with Anaddb.
print(ddb.qpoints[0])
@ -114,12 +118,29 @@ class DdbTest(AbipyTest):
"""Testing DDB for AlAs on a 4x4x4x q-mesh without Born effective charges."""
ddb = DdbFile(os.path.join(test_dir, "AlAs_444_nobecs_DDB"))
repr(ddb); str(ddb)
print(ddb.header)
assert str(ddb.header)
assert ddb.to_string(verbose=2)
assert ddb.header["nkpt"] == 256
assert ddb.header.nsym == 24 and ddb.header.ntypat == 2
self.assert_equal(ddb.header.znucl, [13, 33])
self.assert_equal(ddb.header.acell, [1, 1, 1])
self.assert_equal(ddb.header.ngfft, [10, 10, 10])
self.assert_equal(ddb.header.spinat, 0.0)
#assert ddb.header.occ.shape = (ddb.header.nsppol, ddb.header.nkpt, ddb.header.nsppol)
# TODO
#assert ddb.has_phonon_terms()
#assert not ddb.has_bec_terms()
#assert not ddb.has_emacro_terms()
assert not ddb.has_qpoint([0.345, 0.456, 0.567])
assert ddb.has_qpoint([0, 0, 0])
for qpoint in ddb.qpoints:
assert ddb.has_qpoint(qpoint)
assert ddb.has_qpoint(qpoint.frac_coords)
assert qpoint in ddb.computed_dynmat
assert len(ddb.computed_dynmat[qpoint].index[0]) == 4
assert ddb.has_bec_terms(select="at_least_one")
assert not ddb.has_bec_terms(select="all")
assert not ddb.has_emacro_terms()
assert not ddb.has_lo_to_data()
ref_qpoints = np.reshape([
0.00000000E+00, 0.00000000E+00, 0.00000000E+00,
@ -153,9 +174,9 @@ class DdbTest(AbipyTest):
# Get emacro and becs
emacro, becs = ddb.anaget_emacro_and_becs(chneut=1, verbose=1)
assert np.all(becs.values == 0)
assert np.all(becs.values == 0)
#assert np.all(emacro.values == 0)
repr(becs); str(becs)
assert becs.to_string(verbose=1)
assert becs.to_string(verbose=2)
self.assert_almost_equal(phdos.idos.values[-1], 3 * len(ddb.structure), decimal=1)
phbands_file.close()
@ -180,6 +201,54 @@ class DdbTest(AbipyTest):
ddb.close()
def test_zno_gamma_ddb_with_becs(self):
"""Testing DDB for ZnO: Gamma only, with Born effective charges and E_macro."""
with DdbFile(os.path.join(test_dir, "ZnO_gamma_becs_DDB")) as ddb:
repr(ddb); str(ddb)
assert str(ddb.header)
assert ddb.to_string(verbose=2)
assert ddb.header["nkpt"] == 486
assert ddb.header.nband == 22 and ddb.header.occopt == 1
self.assert_equal(ddb.header.typat, [1, 1, 2, 2])
assert len(ddb.header.wtk) == ddb.header.nkpt
#assert ddb.header.occ.shape = (ddb.header.nsppol, ddb.header.nkpt, ddb.header.nsppol)
assert not ddb.has_qpoint([0.345, 0.456, 0.567])
assert ddb.has_qpoint([0, 0, 0])
assert len(ddb.qpoints) == 1
for qpoint in ddb.qpoints:
assert ddb.has_qpoint(qpoint)
assert ddb.has_qpoint(qpoint.frac_coords)
assert qpoint in ddb.computed_dynmat
assert len(ddb.computed_dynmat[qpoint].index[0]) == 4
assert ddb.has_bec_terms(select="at_least_one")
assert not ddb.has_bec_terms(select="all")
assert ddb.has_emacro_terms()
assert ddb.has_lo_to_data()
# Get emacro and becs
emacro, becs = ddb.anaget_emacro_and_becs(chneut=1, verbose=1)
ref_becs_values = [
[[ 2.15646571e+00, 0.00000000e+00, 3.26402110e-25],
[ 0.00000000e+00, 2.15646571e+00, -5.46500204e-24],
[ -5.66391495e-25, -6.54012564e-25, 2.19362823e+00]],
[[ 2.15646571e+00, 0.00000000e+00, 1.19680774e-24],
[ 0.00000000e+00, 2.15646571e+00, 8.10327888e-24],
[ -1.69917448e-24, -1.30802513e-24, 2.19362823e+00]],
[[ -2.15646571e+00, 6.66133815e-16, -1.84961196e-24],
[ 8.88178420e-16, -2.15646571e+00, 2.82672519e-24],
[ -3.39834897e-24, -3.27006282e-25, -2.19362823e+00]],
[[ -2.15646571e+00, -6.66133815e-16, 3.26402110e-25],
[ -8.88178420e-16, -2.15646571e+00, -5.46500204e-24],
[ 5.66391495e-24, 2.28904397e-24, -2.19362823e+00]]
]
self.assert_almost_equal(becs.values, ref_becs_values)
#self.assert_almost_equal(emacro.values, ref_emacro_values)
repr(becs); str(becs)
assert becs.to_string(verbose=2)
class DielectricTensorGeneratorTest(AbipyTest):


@ -10,17 +10,24 @@ TODO list:
* Check PJDOS in abinit@gitlab
* Add mpirun_args see e.g nic4 and mpirun --bind-to None DONE
* DONE Add mpirun_args see e.g nic4 and mpirun --bind-to None
* Re-implement max_njobs in the queque using a counter local to the Launcher. DONE
* DONE Re-implement max_njobs in the queue using a counter local to the Launcher.
* Fix annoying warnings about k-point sampling.
* Reorganize modules in flowtk to prepare future migration. Modules with gs_works, dfpt_works ...
qadapter package ...
## Medium priority
* Solve problem with visualize in jupyter notebooks (files should be produced in workdir) DONE
* DONE: Autodetect presence of data for lo_to_splitting in DDB.
* Read LO-TO data from phbands instead of anaddb.nc
* Fix travis and sphinx warnings.
* DONE Solve problem with visualize in jupyter notebooks (files should be produced in workdir)
* Change shifts default value in g0w0_with_ppmodel_inputs
@ -28,7 +35,9 @@ TODO list:
* Fix problem with get_edos if we don't have enough bands
* Finalize interface with phononwebsite (ALMOST DONE)
* DONE Finalize interface with phononwebsite.
* Replace core.tensor with pymatgen tensor
* Add nsppol, nspinor, nspden to HIST file (and other stuff?)
@ -41,6 +50,10 @@ TODO list:
* Add memory error to Abinit errors
* Add support for https://mybinder.readthedocs.io/en/latest/sample_repos.html#conda-environment-with-environment-yml
* Remove Old workflow model. Try to reintegrate AbiPy with new abivars
## Low priority
* Use parser subclass to avoid boilerplate code.
@ -55,9 +68,11 @@ TODO list:
* plot_networkx does not work with flows containing callbacks e.g. run_qptdm_flow
* Use ax.legend(loc="best", fontsize=fontsize, shadow=True) DONE
* DONE Use ax.legend(loc="best", fontsize=fontsize, shadow=True)
* Check xsf_write_data
* Check xsf_write_data and visualization of potentials.
* Add phbands.to_bxsf
* Add treatment of out-of-boundary conditions in scissors operator.


@ -344,7 +344,8 @@ closest points in this particular structure. This is usually what you want in a
# Subparser for visualize command.
p_visualize = subparsers.add_parser('visualize', parents=[copts_parser, path_selector],
help="Visualize the structure with the specified application. Requires external app or optional python modules.")
help=("Visualize the structure with the specified application. "
"Requires external app or optional python modules (mayavi, vtk)."))
p_visualize.add_argument("-a", "--appname", type=str, default="vesta",
help=("Application name. Possible options: %s, mayavi, vtk" % ", ".join(Visualizer.all_visunames())))


@ -1,9 +1,7 @@
#!/usr/bin/env python
"""
This script visualizes data with external graphical applications.
It's also possible to generate movies from Abinit output files.
WARNING: This script is still under active development. API will change!
This script visualizes results with external graphical applications
or converts data from Abinit files (usually netcdf) to other formats.
"""
from __future__ import unicode_literals, division, print_function, absolute_import
@ -13,13 +11,25 @@ import argparse
import numpy as np
from monty.functools import prof_main
from monty.termcolor import cprint
from abipy import abilab
from abipy.iotools.visualizer import Visualizer
def handle_overwrite(path, options):
"""Exit 1 if file `path` exists and not options.force else return path."""
name_parts = os.path.splitext(path)
print("Writing %s file:" % name_parts[-1].replace("." , "").upper())
if os.path.existst(path) and not options.force:
cprint("Cannot overwrite pre-existent file. Use `-f` options.", "red")
sys.exit(1)
return path
def abiview_structure(options):
"""
Visualize the structure with the specified visualizer. Requires external app or optional python modules."
Visualize the structure with the specified visualizer. Requires external app
or optional python modules (mayavi, vtk).
"""
structure = abilab.Structure.from_file(options.filepath)
print(structure)
@ -36,6 +46,10 @@ def abiview_hist(options):
print(hist.to_string(verbose=options.verbose))
if options.appname is not None:
hist.visualize(appname=options.appname)
elif options.xdatcar:
xpath = options.filepath + ".XDATCAR"
handle_overwrite(xpath, options)
hist.write_xdatcar(filepath=xpath, groupby_type=True, overwrite=True)
else:
hist.plot()
@ -70,9 +84,11 @@ def abiview_ebands(options):
"""
with abilab.abiopen(options.filepath) as abifile:
if options.xmgrace:
abifile.ebands.to_xmgrace(sys.stdout)
outpath = options.filepath + ".agr"
abifile.ebands.to_xmgrace(handle_overwrite(outpath, options))
elif options.bxsf:
abifile.ebands.to_bxsf(sys.stdout)
outpath = options.filepath + ".bxsf"
abifile.ebands.to_bxsf(handle_overwrite(outpath, options))
else:
print(abifile.to_string(verbose=options.verbose))
if abifile.ebands.kpoints.is_path:
@ -118,19 +134,30 @@ def abiview_ddb(options):
with abilab.abiopen(options.filepath) as ddb:
print(ddb.to_string(verbose=options.verbose))
# TODO: Autodetect presence of lo_to_splitting data in DDB.
nqsmall = 10; ndivsm = 20; asr = 2; chneut = 1; dipdip = 1; dos_method = "tetra"
print("""
Computing phonon bands and DOS from DDB file with
nqsmall = {nqsmall}; ndivsm = {ndivsm}; asr = {asr}; chneut = {chneut}; dipdip = {dipdip}; dos_method = {dos_method}
""".format(**locals()))
# Autodetect presence of lo_to_splitting data in the DDB file.
lo_to_splitting = ddb.has_lo_to_data()
if lo_to_splitting:
print("DDB file contains Zeff and Becs, activating LO-TO computation.")
phbst, phdos = ddb.anaget_phbst_and_phdos_files(
nqsmall=10, ndivsm=20, asr=2, chneut=1, dipdip=1, dos_method="tetra",
lo_to_splitting=False, ngqpt=None, qptbounds=None, anaddb_kwargs=None,
verbose=options.verbose, mpi_procs=1)
nqsmall=nqsmall, ndivsm=ndivsm, asr=asr, chneut=chneut, dipdip=dipdip, dos_method=dos_method,
lo_to_splitting=lo_to_splitting, verbose=options.verbose, mpi_procs=1)
phbands = phbst.phbands
if options.xmgrace:
phbands.to_xmgrace(sys.stdout)
outpath = options.filepath + ".agr"
phbands.to_xmgrace(handle_overwrite(outpath, options))
return 0
#elif options.bxsf:
# phbands.to_bxsf(sys.stdout)
# outpath = options.filepath + ".bxsf"
# phbands.to_bxsf(handle_overwrite(outpath, options))
# return 0
elif options.phononwebsite:
return phbands.view_phononwebsite(browser=options.browser)
@ -147,9 +174,11 @@ def abiview_phbands(options):
"""Plot phonon bands. Accept any file with PhononBands e.g. PHBST.nc, ..."""
with abilab.abiopen(options.filepath) as abifile:
if options.xmgrace:
abifile.phbands.to_xmgrace(sys.stdout)
outpath = options.filepath + ".agr"
abifile.phbands.to_xmgrace(handle_overwrite(outpath, options))
#elif options.bxsf:
# abifile.phbands.to_bxsf(sys.stdout)
# outpath = options.filepath + ".bxsf"
# abifile.phbands.to_bxsf(handle_overwrite(outpath, options))
# return 0
elif options.phononwebsite:
return abifile.phbands.view_phononwebsite(browser=options.browser)
@ -159,6 +188,7 @@ def abiview_phbands(options):
return 0
def abiview_phdos(options):
"""Plot phonon DOS. Require PHDOS.nc file."""
with abilab.abiopen(options.filepath) as abifile:
@ -184,7 +214,17 @@ def abiview_denpot(options):
"""
with abilab.abiopen(options.filepath) as abifile:
print(abifile.to_string(verbose=options.verbose))
abifile.field.visualize(appname="vesta")
field = abifile.field
if options.chgcar:
if not field.is_density_like:
cprint("Warning: expecting DENSITY file but received %s" % field.__class__.__name__, "yellow")
chg_path = options.filepath + ".CHGCAR"
field.to_chgcar(filename=handle_overwrite(chg_path, options))
#elif options.cube:
# outpath = options.filepath + ".cube"
# field.export_to_cube(filename=handle_overwrite(outpath, options), spin="total")
else:
abifile.field.visualize(appname=options.appname)
return 0
@ -220,9 +260,11 @@ Usage example:
# Structure
###########
abiview.py structure FILE ==> Visualize structure with
abiview.py structure FILE ==> Visualize structure with Vesta (default)
abiview.py structure FILE -a xcrysden ==> Visualize structure with Xcrysden.
abiview.py hist out_HIST.nc ==> Plot relaxation/molecular dynamics results with matplotlib.
abiview.py hist out_HIST.nc -a ovito ==> Visualize relaxation/molecular dynamics results with ovito.
abiview.py hist out_HIST.nc -a ovito ==> Visualize relaxation/molecular dynamics results with ovito.
abiview.py hist out_HIST.nc --xdatcar ==> Convert HIST.nc into XDATCAR format (caveat: assumes a fixed unit cell!)
############
# Text files
@ -235,9 +277,11 @@ Usage example:
# Electrons
###########
abiview.py ebands out_GSR.nc --xmgrace ==> Generate xmgrace file.
abiview.py fatbands out_FATBANDS.nc ==> Plot electron fatbands or PJDOS depending on k-sampling.
abiview.py denpot out_DEN.nc ==> Animate densities on FFT mesh.
abiview.py ebands out_WFK.nc ==> Plot electrons bands (or DOS) with matplotlib.
abiview.py ebands out_GSR.nc --xmgrace ==> Generate xmgrace file with electron bands.
abiview.py fatbands out_FATBANDS.nc ==> Plot electron fatbands or PJDOS depending on k-sampling.
abiview.py denpot out_DEN.nc ==> Visualize density with Vesta.
abiview.py denpot out_DEN.nc --chgcar ==> Convert DEN file into CHGCAR format.
#########
# Phonons
@ -251,16 +295,16 @@ Usage example:
# E-PH
#########
abiview.py eph out_EPH.nc => Plot EPH results.
abiview.py sigeph out_SIGEPH.nc => Plot Fan-Migdal self-energy.
abiview.py eph out_EPH.nc ==> Plot EPH results.
abiview.py sigeph out_SIGEPH.nc ==> Plot Fan-Migdal self-energy.
########
# GW/BSE
########
abicomp.py sigres out_SIGRES.nc => Compare multiple SIGRES files.
abicomp.py mdf out_MDF.nc --seaborn => Compare macroscopic dielectric functions.
Use seaborn settings.
abiview.py sigres out_SIGRES.nc ==> Plot QP results stored in SIGRES.
abiview.py mdf out_MDF.nc --seaborn ==> Plot macroscopic dielectric functions with excitonic effects.
Use seaborn settings.
Use `abiview.py --help` for help and `abiview.py COMMAND --help` to get the documentation for `COMMAND`.
@ -305,6 +349,9 @@ def get_parser(with_epilog=False):
p.add_argument("-b", "--browser", default=None,
help="Define browser used by python webbrowser. "
"See https://docs.python.org/2/library/webbrowser.html#webbrowser.register")
elif arg == "force":
p.add_argument("-f", '--force', default=False, action="store_true",
help="Overwrite pre-existent files without prompting for confirmation.")
else:
raise ValueError("Invalid arg: %s" % arg)
@ -319,6 +366,7 @@ def get_parser(with_epilog=False):
p_hist.add_argument("-a", "--appname", nargs="?", default=None, const="ovito",
help=("Application name. Default: ovito. "
"Possible options: `%s`, `mayavi`, `vtk`" % ", ".join(Visualizer.all_visunames())))
p_hist.add_argument("--xdatcar", default=False, action="store_true", help="Convert HIST file into XDATCAR format.")
# Subparser for abo command.
p_abo = subparsers.add_parser('abo', parents=[copts_parser], help=abiview_abo.__doc__)
@ -328,18 +376,18 @@ def get_parser(with_epilog=False):
# Subparser for ebands commands.
p_ebands = subparsers.add_parser('ebands', parents=[copts_parser], help=abiview_ebands.__doc__)
add_args(p_ebands, "xmgrace", "bxsf")
add_args(p_ebands, "xmgrace", "bxsf", "force")
# Subparser for fatbands commands.
p_fatbands = subparsers.add_parser('fatbands', parents=[copts_parser], help=abiview_fatbands.__doc__)
# Subparser for ddb command.
p_ddb = subparsers.add_parser('ddb', parents=[copts_parser], help=abiview_ddb.__doc__)
add_args(p_ddb, "xmgrace", "phononweb", "browser")
add_args(p_ddb, "xmgrace", "phononweb", "browser", "force")
# Subparser for phbands command.
p_phbands = subparsers.add_parser('phbands', parents=[copts_parser], help=abiview_phbands.__doc__)
add_args(p_phbands, "xmgrace", "phononweb", "browser")
add_args(p_phbands, "xmgrace", "phononweb", "browser", "force")
# Subparser for phdos command.
p_phdos = subparsers.add_parser('phdos', parents=[copts_parser], help=abiview_phdos.__doc__)
@ -350,8 +398,22 @@ def get_parser(with_epilog=False):
# Subparser for mdf command.
p_mdf = subparsers.add_parser('mdf', parents=[copts_parser], help=abiview_mdf.__doc__)
# Subparser for mdf command.
#p_optic = subparsers.add_parser('optic', parents=[copts_parser], help=abiview_optic.__doc__)
# Subparser for eph command.
#p_eph = subparsers.add_parser('eph', parents=[copts_parser], help=abiview_eph.__doc__)
# Subparser for sigeph command.
#p_sigeph = subparsers.add_parser('sigeph', parents=[copts_parser], help=abiview_sigeph.__doc__)
# Subparser for denpot command.
p_denpot = subparsers.add_parser('denpot', parents=[copts_parser], help=abiview_denpot.__doc__)
#p_denpot = subparsers.add_parser('denpot', parents=[copts_parser], help=abiview_denpot.__doc__)
#p_denpot.add_argument("-a", "--appname", nargs="?", default=None, const="vesta",
# help=("Application name. Default: vesta. "
# "Possible options: `%s`, `mayavi`, `vtk`" % ", ".join(Visualizer.all_visunames())))
#p_denpot.add_argument("--chgcar", default=False, action="store_true", "Convert Density to CHGCAR format.")
#p_denpot.add_argument("--cube", default=False, action="store_true", "Convert Density/Potential to CUBE format.")
return parser
@ -374,6 +436,9 @@ def main():
except Exception:
show_examples_and_exit(error_code=1)
if not options.command:
show_examples_and_exit(error_code=1)
# loglevel is bound to the string value obtained from the command line argument.
# Convert to upper case to allow the user to specify --loglevel=DEBUG or --loglevel=debug
import logging