Merge pull request #5 from abinit/gp_qha

Support for D0, D1 derivatives in DdbFile + ElasticData refactoring
This commit is contained in:
Guido Petretto 2018-07-31 10:35:22 +02:00 committed by GitHub
commit 548d8470ef
16 changed files with 887 additions and 132 deletions

View File

@ -57,7 +57,7 @@ from abipy.electrons.fold2bloch import Fold2BlochNcfile
from abipy.dfpt.phonons import (PhbstFile, PhbstRobot, PhononBands, PhononBandsPlotter, PhdosFile, PhononDosPlotter,
PhdosReader, phbands_gridplot)
from abipy.dfpt.ddb import DdbFile, DdbRobot
from abipy.dfpt.anaddbnc import AnaddbNcFile
from abipy.dfpt.anaddbnc import AnaddbNcFile, AnaddbNcRobot
from abipy.dfpt.gruneisen import GrunsNcFile
from abipy.dynamics.hist import HistFile, HistRobot
from abipy.waves import WfkFile

View File

@ -70,6 +70,11 @@ class Robot(NotebookWriter):
if subcls.EXT in (ext, ext.upper()):
return subcls
# anaddb.nc does not follow the extension rule...
if ext.lower() == "anaddb":
from abipy.dfpt.anaddbnc import AnaddbNcRobot as subcls
return subcls
raise ValueError("Cannot find Robot subclass associated to extension %s\n" % ext +
"The list of supported extensions (case insensitive) is:\n%s" %
str(cls.get_supported_extensions()))
@ -153,6 +158,10 @@ class Robot(NotebookWriter):
@classmethod
def class_handles_filename(cls, filename):
"""True if robot class handles filename."""
# Special treatment of AnaddbNcRobot
if cls.EXT == "anaddb" and os.path.basename(filename).lower() == "anaddb.nc":
return True
return (filename.endswith("_" + cls.EXT + ".nc") or
filename.endswith("." + cls.EXT)) # This for .abo
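For reference, a minimal usage sketch of the new special case (paths are placeholders; ``from_files`` is the standard Robot constructor)::

    from abipy.dfpt.anaddbnc import AnaddbNcRobot

    # anaddb.nc does not follow the usual `_EXT.nc` naming rule, hence the checks above.
    assert AnaddbNcRobot.class_handles_filename("/some/dir/anaddb.nc")
    robot = AnaddbNcRobot.from_files(["run1/anaddb.nc", "run2/anaddb.nc"])  # hypothetical paths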
@ -172,7 +181,9 @@ class Robot(NotebookWriter):
for i, f in enumerate(filenames):
try:
abifile = abiopen(f)
except Exception:
except Exception as exc:
cprint("Exception while opening file: `%s`" % str(f), "read")
cprint(exc, "read")
abifile = None
if abifile is not None:

View File

@ -43,32 +43,18 @@ def cmp_version(this, other, op=">="):
return op(parse_version(this), parse_version(other))
#TODO: Replace with abinit build and manager
def has_abinit(version=None, op=">="):
def has_abinit(version=None, op=">=", manager=None):
"""
True if abinit is in $PATH.
If version is not None, abinit version op version is evaluated and the result is returned.
False if condition is not fulfilled or the execution of ``abinit -v`` raised CalledProcessError
True if abinit is available via TaskManager configuration options.
If version is not None, `abinit_version op version` is evaluated and the result is returned.
"""
abinit = which("abinit")
if abinit is None: return False
if version is None: return abinit is not None
try:
abinit_version = str(subprocess.check_output(["abinit", "-v"]))
except subprocess.CalledProcessError:
# Some MPI implementations require the mpirunner.
try:
abinit_version = subprocess.check_output(["mpirun", "-n", "1", "abinit", "-v"])
except subprocess.CalledProcessError:
try:
abinit_version = subprocess.check_output(["mpiexec", "-n", "1", "abinit", "-v"])
except subprocess.CalledProcessError as exc:
logger.warning(exc.output)
return False
return cmp_version(abinit_version, version, op=op)
from abipy.flowtk import TaskManager, AbinitBuild
manager = TaskManager.from_user_config() if manager is None else manager
build = AbinitBuild(manager=manager)
if version is None:
return build.version != "0.0.0"
else:
return cmp_version(build.version, version, op=op)
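A hedged usage sketch (it assumes a valid ``manager.yml`` so that ``TaskManager.from_user_config()`` succeeds; the helper lives in ``abipy.core.testing``)::

    from abipy.core.testing import has_abinit

    # With version=None only the presence of an Abinit build is checked,
    # otherwise build.version is compared against `version` with the operator `op`.
    print(has_abinit())
    print(has_abinit(version="8.8.0", op=">="))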
def has_matplotlib(version=None, op=">="):
@ -292,6 +278,12 @@ class AbipyTest(PymatgenTest):
"""Return True if abinit is in $PATH and version is op min_version."""
return has_abinit(version=version, op=op)
def skip_if_abinit_not_ge(self, version):
"""Skip test if Abinit version is not >= `version`"""
op = ">="
if not self.has_abinit(version, op=op):
raise unittest.SkipTest("This test requires Abinit version %s %s" % (op, version))
@staticmethod
def has_matplotlib(version=None, op=">="):
return has_matplotlib(version=version, op=op)

View File

@ -4,12 +4,18 @@ AnaddbNcFile provides a high-level interface to the data stored in the anaddb.nc
"""
from __future__ import print_function, division, unicode_literals, absolute_import
import os
import pandas as pd
from collections import OrderedDict
from monty.functools import lazy_property
from monty.string import marquee
from monty.termcolor import cprint
from abipy.core.tensor import Tensor
from abipy.core.mixins import AbinitNcFile, Has_Structure, NotebookWriter
from abipy.abio.robots import Robot
from abipy.iotools import ETSF_Reader
from abipy.tools.plotting import add_fig_kwargs, get_axarray_fig_plt, rotate_ticklabels, set_visible
from abipy.dfpt.phonons import InteratomicForceConstants
from abipy.dfpt.ddb import Becs
from abipy.dfpt.tensors import NLOpticalSusceptibilityTensor
@ -50,14 +56,23 @@ class AnaddbNcFile(AbinitNcFile, Has_Structure, NotebookWriter):
def __init__(self, filepath):
super(AnaddbNcFile, self).__init__(filepath)
self.reader = ETSF_Reader(filepath)
self._structure = self.reader.read_structure()
def close(self):
self.reader.close()
@lazy_property
def structure(self):
return self.reader.read_structure()
@lazy_property
def params(self):
return {}
# -666 to support old anaddb.nc files without metadata
return OrderedDict([
("asr", int(self.reader.read_value("asr", default=-666))),
("chneut", int(self.reader.read_value("chneut", default=-666))),
("dipdip", int(self.reader.read_value("dipdip", default=-666))),
("symdynmat", int(self.reader.read_value("symdynmat", default=-666))),
])
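A small sketch of the new metadata access (the path is a placeholder; old anaddb.nc files without these variables fall back to -666)::

    from abipy.dfpt.anaddbnc import AnaddbNcFile

    with AnaddbNcFile("anaddb.nc") as anc:
        print(anc.params)  # e.g. OrderedDict([('asr', 2), ('chneut', 1), ('dipdip', 1), ('symdynmat', 1)])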
def __str__(self):
return self.to_string()
@ -76,11 +91,28 @@ class AnaddbNcFile(AbinitNcFile, Has_Structure, NotebookWriter):
app("")
app(self.structure.to_string(verbose=verbose, title="Structure"))
return "\n".join(lines)
if self.has_elastic_data:
app("")
df = self.elastic_data.get_average_elastic_dataframe(tensor="elastic_relaxed")
if not df.empty:
app(marquee("Averaged elastic properties (relaxed ions)", mark="="))
app(df.to_string(index=False))
app("")
df = self.elastic_data.get_elast_properties_dataframe()
if not df.empty:
app(marquee("Averaged elastic properties (relaxed ions)", mark="="))
app(df.T.to_string(index=True))
@property
def structure(self):
return self._structure
if verbose:
df = self.elastic_data.get_voigt_dataframe(["elastic_relaxed", "elastic_clamped"])
app(df.T.to_string())
#if self.has_piezoelectric_data:
# df = self.elastic_data.get_piezoelectric_dataframe()
#app(str(self.params))
return "\n".join(lines)
@lazy_property
def emacro(self):
@ -111,8 +143,8 @@ class AnaddbNcFile(AbinitNcFile, Has_Structure, NotebookWriter):
"""
Born effective charges. None if the file does not contain this information.
"""
chneut = self.params["chneut"]
try:
chneut = -666 # TODO: anaddb.nc should contain the input file.
return Becs(self.reader.read_value("becs_cart"), self.structure, chneut=chneut, order="f")
except Exception as exc:
print(exc, "Returning None", sep="\n")
@ -180,6 +212,24 @@ class AnaddbNcFile(AbinitNcFile, Has_Structure, NotebookWriter):
print(exc, "Oscillator strengths require dieflag == 1, 3 or 4", "Returning None", sep="\n")
return None
@lazy_property
def has_elastic_data(self):
"""True if elastic tensors have been computed."""
return self.reader.read_value("elaflag", default=0) != 0
@lazy_property
def has_piezoelectric_data(self):
"""True if piezoelectric tensors have been computed."""
return self.reader.read_value("piezoflag", default=0) != 0
@lazy_property
def elastic_data(self):
"""
Container with the different (piezo)elastic tensors computed by anaddb.
stored in pymatgen tensor objects.
"""
return ElasticData.from_ncreader(self.reader)
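A minimal sketch, assuming the anaddb.nc file was produced with ``elaflag``/``piezoflag`` activated (path is a placeholder)::

    from abipy.dfpt.anaddbnc import AnaddbNcFile

    with AnaddbNcFile("anaddb.nc") as anc:
        if anc.has_elastic_data:
            edata = anc.elastic_data
            print(edata.elastic_relaxed)      # relaxed-ion elastic tensor (GPa)
            if anc.has_piezoelectric_data:
                print(edata.piezo_relaxed)    # relaxed-ion piezoelectric tensor (C/m^2)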
def yield_figs(self, **kwargs): # pragma: no cover
"""
This function *generates* a predefined list of matplotlib figures with minimal input from the user.
@ -201,6 +251,178 @@ class AnaddbNcFile(AbinitNcFile, Has_Structure, NotebookWriter):
return self._write_nb_nbpath(nb, nbpath)
@lazy_property
def elastic_data(self):
return ElasticData.from_ncreader(self.reader)
class AnaddbNcRobot(Robot):
"""
This robot analyzes the results contained in multiple anaddb.nc files.
.. rubric:: Inheritance Diagram
.. inheritance-diagram:: AnaddbNcRobot
"""
EXT = "anaddb"
@property
def has_elastic_data(self):
return all(ncfile.has_elastic_data for ncfile in self.abifiles)
def get_dataframe(self):
if self.has_elastic_data:
return self.get_elastic_dataframe()
#for ncfile in self.abifiles:
# df = ncfile.elastic_data.get_elast_properties_dataframe(etypes=["elastic_relaxed"])
# df_list.append(df)
#df_list = []
#df = pd.concat(df_list, ignore_index=True)
#df["labels"] = list(self.keys())
#df.set_index("labels", inplace=True)
#return df
def get_elastic_dataframe(self, with_geo=True, abspath=False, with_params=False, funcs=None, **kwargs):
"""
Return a |pandas-DataFrame| with properties derived from the elastic tensor
and an associated structure. Filename is used as index.
Args:
with_geo: True if structure info should be added to the dataframe
abspath: True if paths in index should be absolute. Default: Relative to getcwd().
with_params: False to exclude calculation parameters from the dataframe.
kwargs:
attrs:
List of additional attributes of the |AnaddbNcFile| to add to the DataFrame.
funcs: Function or list of functions to execute to add more data to the DataFrame.
Each function receives an |AnaddbNcFile| object and returns a tuple (key, value)
where key is a string with the name of column and value is the value to be inserted.
"""
# Add attributes specified by the users
attrs = [
#"energy", "pressure", "max_force",
#"nsppol", "nspinor", "nspden",
] + kwargs.pop("attrs", [])
rows, index = [], []
for label, ncfile in self.items():
index.append(label)
d = OrderedDict()
# Add info on structure.
if with_geo:
d.update(ncfile.structure.get_dict4pandas(with_spglib=True))
if with_params:
d.update(self.params)
# Execute functions
if funcs is not None: d.update(self._exec_funcs(funcs, ncfile))
df = ncfile.elastic_data.get_elast_properties_dataframe(tensor_names="elastic_relaxed")
d.update(df.to_dict("records")[0])
rows.append(d)
#index = index if not abspath else self._to_relpaths(index)
return pd.DataFrame(rows, index=index, columns=list(rows[0].keys()) if rows else None)
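Usage sketch (file paths are placeholders; the ``k_vrh``/``g_vrh`` columns are assumed to come from pymatgen's ``get_structure_property_dict``)::

    from abipy.dfpt.anaddbnc import AnaddbNcRobot

    robot = AnaddbNcRobot.from_files(["run1/anaddb.nc", "run2/anaddb.nc"])  # hypothetical paths
    df = robot.get_elastic_dataframe(with_geo=False)
    print(df[["k_vrh", "g_vrh"]])      # Voigt-Reuss-Hill averages in GPa (assumed column names)
    robot.plot_elastic_properties()
    robot.close()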
@add_fig_kwargs
def plot_elastic_properties(self, fontsize=10, **kwargs):
"""
Args:
fontsize: legend and label fontsize.
Returns: |matplotlib-Figure|
"""
df = self.get_elastic_dataframe(with_geo=False, abspath=False, with_params=False)
from pandas.api.types import is_numeric_dtype
keys = [k for k in df.keys() if is_numeric_dtype(df[k])]
if "fit_to_structure" in keys:
keys.remove("fit_to_structure")
num_plots, ncols, nrows = len(keys), 1, 1
if num_plots > 1:
ncols = 3
nrows = (num_plots // ncols) + (1 if num_plots % ncols else 0)
ax_list, fig, plt = get_axarray_fig_plt(None, nrows=nrows, ncols=ncols,
sharex=False, sharey=False, squeeze=False)
ax_list = ax_list.ravel()
for ix, (key, ax) in enumerate(zip(keys, ax_list)):
irow, icol = divmod(ix, ncols)
xn = range(len(df.index))
ax.plot(xn, df[key], marker="o")
ax.grid(True)
ax.set_xticks(xn)
ax.set_ylabel(key, fontsize=fontsize)
ax.set_xticklabels([])
ax.set_xticklabels(self.keys(), fontsize=fontsize)
rotate_ticklabels(ax, 15)
if ix != len(ax_list) -1:
for ix in range(ix + 1, len(ax_list)):
ax_list[ix].axis('off')
return fig
#def get_voigt_dataframe(self, tensor_names):
# ncfile.get_voigt_dataframe(self, tensor_names):
# @add_fig_kwargs
# def plot_gsr_convergence(self, sortby=None, hue=None, fontsize=6,
# items=("energy", "pressure", "max_force"), **kwargs):
# """
# Plot the convergence of the most important quantities available in the GSR file
# wrt to the ``sortby`` parameter. Values can optionally be grouped by ``hue``.
#
# Args:
# sortby: Define the convergence parameter, sort files and produce plot labels.
# Can be None, string or function. If None, no sorting is performed.
# If string and not empty it's assumed that the abifile has an attribute
# with the same name and `getattr` is invoked.
# If callable, the output of sortby(abifile) is used.
# hue: Variable that define subsets of the data, which will be drawn on separate lines.
# Accepts callable or string
# If string, it's assumed that the abifile has an attribute with the same name and getattr is invoked.
# If callable, the output of hue(abifile) is used.
# items: List of GSR attributes (or callables) to be analyzed.
# fontsize: legend and label fontsize.
#
# Returns: |matplotlib-Figure|
#
# Example:
#
# robot.plot_gsr_convergence(sortby="nkpt", hue="tsmear")
# """
# return self.plot_convergence_items(items, sortby=sortby, hue=hue, fontsize=fontsize, show=False, **kwargs)
def yield_figs(self, **kwargs): # pragma: no cover
"""
This function *generates* a predefined list of matplotlib figures with minimal input from the user.
Used in abiview.py to get a quick look at the results.
"""
if self.has_elastic_data:
yield self.plot_elastic_properties(show=False)
else:
yield None
def write_notebook(self, nbpath=None):
"""
Write a jupyter_ notebook to ``nbpath``. If nbpath is None, a temporary file in the current
working directory is created. Return path to the notebook.
"""
nbformat, nbv, nb = self.get_nbformat_nbv_nb(title=None)
args = [(l, f.filepath) for l, f in self.items()]
nb.cells.extend([
#nbv.new_markdown_cell("# This is a markdown cell"),
nbv.new_code_cell("robot = abilab.AnaddbNcRobot(*%s)\nrobot.trim_paths()\nrobot" % str(args)),
#nbv.new_code_cell("df = ebands_plotter.get_ebands_frame()\ndisplay(df)"),
])
# Mixins
nb.cells.extend(self.get_baserobot_code_cells())
return self._write_nb_nbpath(nb, nbpath)

View File

@ -8,6 +8,7 @@ import tempfile
import itertools
import numpy as np
import pandas as pd
import abipy.core.abinit_units as abu
from collections import OrderedDict
from six.moves import map, zip, StringIO
@ -27,8 +28,8 @@ from abipy.dfpt.phonons import PhononDosPlotter, PhononBandsPlotter, Interatomic
from abipy.dfpt.tensors import DielectricTensor
from abipy.dfpt.elastic import ElasticData
from abipy.core.abinit_units import phfactor_ev2units, phunit_tag #Ha_cmm1,
from pymatgen.analysis.elasticity.elastic import ElasticTensor
from pymatgen.core.units import eV_to_Ha, bohr_to_angstrom
from pymatgen.analysis.elasticity import Stress, ElasticTensor
from pymatgen.core.units import eV_to_Ha, bohr_to_angstrom, Energy
from abipy.tools.plotting import Marker, add_fig_kwargs, get_ax_fig_plt, set_axlims
from abipy.tools import duck
from abipy.abio.robots import Robot
@ -74,7 +75,7 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
About the indices (idir, ipert) used by Abinit (Fortran notation):
* idir in [1, 2, 3] gives the direction (usually reduced direction)
* idir in [1, 2, 3] gives the direction (usually reduced direction, cart for strain)
* ipert in [1, 2, ..., mpert] where mpert = natom + 6
* ipert in [1, ..., natom] corresponds to atomic perturbations
@ -375,14 +376,31 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
if "List of bloks and their characteristics" in line:
# add last block when we reach the last part of the file.
blocks.append({"data": block_lines, "qpt": qpt})
# This line is present only if DDB has been produced by mrgddb
if block_lines:
blocks.append({"data": block_lines, "qpt": qpt, "dord": dord})
block_lines = []
qpt = None
break
# Don't use lstring because we may reuse block_lines to write new DDB.
line = line.rstrip()
# new block
# new block --> detect order
if "# elements" in line:
if block_lines:
blocks.append({"data": block_lines, "qpt": qpt})
blocks.append({"data": block_lines, "qpt": qpt, "dord": dord})
tokens = line.split()
num_elements = int(tokens[-1])
s = " ".join(tokens[:2])
dord = {"Total energy": 0,
"1st derivatives": 1,
"2nd derivatives": 2,
"3rd derivatives": 3}.get(s, None)
if dord is None:
raise RuntimeError("Cannot detect derivative order from string: `%s`" % s)
block_lines = []
qpt = None
@ -390,6 +408,10 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
if "qpt" in line:
qpt = list(map(float, line.split()[1:4]))
if block_lines:
blocks.append({"data": block_lines, "qpt": qpt, "dord": dord})
return blocks
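For illustration, the parsed blocks can be inspected as follows (sketch; the DDB path is a placeholder)::

    from abipy import abilab

    with abilab.abiopen("out_DDB") as ddb:
        for block in ddb.blocks:
            # dord: 0 (total energy), 1 (forces/stress), 2 (dynamical matrix), 3 (third-order derivatives)
            print(block["dord"], block["qpt"], len(block["data"]))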
@property
@ -471,6 +493,78 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
raise TypeError("object %s does not have `params` attribute" % type(obj))
obj.params.update(self.params)
@lazy_property
def total_energy(self):
"""
Total energy in eV. None if not available.
"""
for block in self.blocks:
if block["dord"] == 0:
ene_ha = float(block["data"][1].split()[0].replace("D", "E"))
return Energy(ene_ha, "Ha").to("eV")
return None
@lazy_property
def cart_forces(self):
"""
Cartesian forces in eV / Ang
None if not available i.e. if the GS DDB has not been merged.
.. note::
These values correspond to the `fred` array in abinit:
this array has *not* been corrected by enforcing
translational symmetry, i.e. the sum of the forces
over all atoms is not necessarily zero.
So inconsistencies with the results reported in the output file are expected.
"""
for block in self.blocks:
if block["dord"] != 1: continue
natom = len(self.structure)
fred = np.empty((natom, 3))
for line in block["data"][1:]:
idir, ipert, fval = line.split()[:3]
# F --> C
idir, ipert = int(idir) - 1, int(ipert) - 1
if ipert < natom:
fred[ipert, idir] = float(fval.replace("D", "E"))
# FIXME
gprimd = self.structure.reciprocal_lattice.matrix
fcart = - fred
return fcart * abu.Ha_eV / abu.Bohr_Ang
return None
@lazy_property
def cart_stress_tensor(self):
"""
pymatgen Stress tensor in Cartesian coordinates, converted to eV/Ang^3 from the Ha/Bohr^3 values stored in the DDB.
None if not available.
"""
for block in self.blocks:
if block["dord"] != 1: continue
svoigt = np.empty(6)
# Abinit stress is in cart coords and Ha/Bohr**3
# Map (idir, ipert) --> voigt
uniax, shear = len(self.structure) + 3, len(self.structure) + 4
dirper2voigt = {
(1, uniax): 0,
(2, uniax): 1,
(3, uniax): 2,
(1, shear): 3,
(2, shear): 4,
(3, shear): 5}
for line in block["data"][1:]:
idir, ipert, fval = line.split()[:3]
idp = int(idir), int(ipert)
if idp in dirper2voigt:
svoigt[dirper2voigt[idp]] = float(fval.replace("D", "E"))
return Stress.from_voigt(svoigt * abu.Ha_eV / (abu.Bohr_Ang**3))
return None
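A quick sketch of the new D0/D1 accessors; all of them return None unless the corresponding ground-state blocks have been merged into the DDB (path is a placeholder)::

    from abipy import abilab

    with abilab.abiopen("out_DDB") as ddb:
        print("Etotal:", ddb.total_energy)          # pymatgen Energy in eV, or None
        print("Forces:\n", ddb.cart_forces)         # (natom, 3) array in eV/Ang, or None
        print("Stress:\n", ddb.cart_stress_tensor)  # pymatgen Stress in eV/Ang^3, or None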
def has_lo_to_data(self, select="at_least_one"):
"""
True if the DDB file contains the data required to compute the LO-TO splitting.
@ -665,8 +759,8 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
return ncfile.phbands
def anaget_phbst_and_phdos_files(self, nqsmall=10, qppa=None, ndivsm=20, line_density=None, asr=2, chneut=1, dipdip=1,
dos_method="tetra", lo_to_splitting="automatic", ngqpt=None, qptbounds=None,
def anaget_phbst_and_phdos_files(self, nqsmall=10, qppa=None, ndivsm=20, line_density=None, asr=2, chneut=1, dipdip=1,
dos_method="tetra", lo_to_splitting="automatic", ngqpt=None, qptbounds=None,
anaddb_kwargs=None, verbose=0, spell_check=True,
mpi_procs=1, workdir=None, manager=None):
"""
@ -1081,7 +1175,7 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
os.path.join(task.workdir, "anaddb.nc"))
def anaget_elastic(self, has_gamma_ph=False, has_dde=False, asr=2, chneut=1,
mpi_procs=1, workdir=None, manager=None, verbose=0):
mpi_procs=1, workdir=None, manager=None, verbose=0, retpath=False):
"""
Call anaddb to compute the elastic and piezoelectric properties.
@ -1094,9 +1188,10 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
mpi_procs: Number of MPI processes to use.
workdir: Working directory. If None, a temporary directory is created.
verbose: verbosity level. Set it to a value > 0 to get more information
retpath: True to return path to anaddb.nc file.
Return:
ElasticData object
ElasticData object if retpath is False else path to the anaddb.nc file.
"""
if not self.has_strain_terms():
cprint("Strain perturbations are not available in DDB: %s" % self.filepath, "yellow")
@ -1117,7 +1212,8 @@ class DdbFile(TextFile, Has_Structure, NotebookWriter):
raise self.AnaddbError(task=task, report=report)
# Read data from the netcdf output file produced by anaddb.
return ElasticData.from_file(os.path.join(task.workdir, "anaddb.nc"))
path = os.path.join(task.workdir, "anaddb.nc")
return ElasticData.from_file(path) if not retpath else path
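The new ``retpath`` option makes it easy to collect the anaddb.nc files in an AnaddbNcRobot, along the lines of the commented-out ``anaget_elastic_robot`` helper added to DdbRobot further below. A sketch (DDB paths are placeholders, the DDBs must contain strain perturbations and a working anaddb is required)::

    from abipy import abilab
    from abipy.dfpt.anaddbnc import AnaddbNcRobot

    paths = []
    for ddb_path in ["run1/out_DDB", "run2/out_DDB"]:
        with abilab.abiopen(ddb_path) as ddb:
            paths.append(ddb.anaget_elastic(has_gamma_ph=True, retpath=True))

    robot = AnaddbNcRobot.from_files(paths)
    print(robot.get_elastic_dataframe())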
def write(self, filepath, filter_blocks=None):
"""
@ -1721,6 +1817,16 @@ class DdbRobot(Robot):
return dict2namedtuple(phbands_plotter=phbands_plotter, phdos_plotter=phdos_plotter)
#def anaget_elastic_robot(self, manager=None):
# anaddbnc_paths = []
# for ddb in self.abifiles:
# p = ddb.anaget_elastic(has_gamma_ph=True, has_dde=False, asr=2, chneut=1,
# mpi_procs=1, workdir=None, manager=manager, verbose=0, retpath=True)
# anaddbnc_paths.append(p)
# from abipy.dfpt.anaddbnc import AnaddbNcRobot
# return AnaddbNcRobot.from_files(anaddbnc_paths)
def yield_figs(self, **kwargs): # pragma: no cover
"""
This function *generates* a predefined list of matplotlib figures with minimal input from the user.

View File

@ -1,59 +1,166 @@
# coding: utf-8
"""
AnaddbNcFile provides a high-level interface to the data stored in the anaddb.nc file.
Objects to analyze elastic and piezoelectric tensors computed by anaddb.
"""
from __future__ import print_function, division, unicode_literals, absolute_import
import sys
import pandas as pd
from collections import OrderedDict
from monty.string import is_string, list_strings, marquee
from monty.collections import AttrDict
from monty.functools import lazy_property
from pymatgen.analysis.elasticity.tensors import Tensor
from pymatgen.analysis.elasticity.elastic import ElasticTensor
from pymatgen.analysis.piezo import PiezoTensor
from abipy.core.mixins import Has_Structure
from abipy.flowtk.netcdf import ETSF_Reader
from pymatgen.analysis.elasticity.elastic import ElasticTensor
from pymatgen.analysis.elasticity.tensors import Tensor
from pymatgen.analysis.piezo import PiezoTensor
class MyElasticTensor(ElasticTensor):
def _repr_html_(self):
"""Integration with jupyter notebooks."""
return self.to_voigt_dataframe()._repr_html_()
def to_voigt_dataframe(self, tol=1e-5):
tensor = self.zeroed(tol) if tol else self
columns = ["xx", "yy", "zz", "yz", "xz", "xy"]
#columns = ["1", "2", "3", "4", "5", "6"]
rows = []
for row in tensor.voigt:
rows.append({k: v for k, v in zip(columns, row)})
return pd.DataFrame(rows, index=columns, columns=columns)
class MyPiezoTensor(PiezoTensor):
def _repr_html_(self):
"""Integration with jupyter notebooks."""
return self.to_voigt_dataframe()._repr_html_()
def to_voigt_dataframe(self, tol=1e-5):
tensor = self.zeroed(tol) if tol else self
index = ["Px", "Py", "Pz"]
columns = ["xx", "yy", "zz", "yz", "xz", "xy"]
#index = ["P1", "P2", "P3"]
#columns = ["1", "2", "3", "4", "5", "6"]
rows = []
for irow, row in enumerate(tensor.voigt):
rows.append({k: v for k, v in zip(columns, row)})
return pd.DataFrame(rows, index=index, columns=columns)
class ElasticData(Has_Structure):
"""
Handles all kind of elastic data return from abinit/anaddb relying on pymatgen tensor objects.
Container with the different elastic and piezoelectric tensors
computed by anaddb. Data is stored in pymatgen tensor objects.
Provides methods to analyze/tabulate the data.
http://progs.coudert.name/elate/mp?query=mp-2172
"""
def __init__(self, structure, elastic_clamped=None, elastic_relaxed=None, elastic_stress_corr=None,
elastic_relaxed_fixed=None, piezo_clamped=None, piezo_relaxed=None, d_piezo_relaxed=None,
ALL_ELASTIC_TENSOR_NAMES = (
"elastic_clamped", "elastic_relaxed",
"elastic_stress_corr", "elastic_relaxed_fixed_D",
)
ALL_PIEZOELECTRIC_TENSOR_NAMES = (
"piezo_clamped", "piezo_relaxed",
"d_piezo_relaxed", "g_piezo_relaxed", "h_piezo_relaxed"
)
ALL_TENSOR_NAMES = ALL_ELASTIC_TENSOR_NAMES + ALL_PIEZOELECTRIC_TENSOR_NAMES
TYPE2NAMES = {
"all": ALL_TENSOR_NAMES,
"elastic": ALL_ELASTIC_TENSOR_NAMES,
"piezoelectric": ALL_PIEZOELECTRIC_TENSOR_NAMES,
}
TENSOR_META = {
"elastic_clamped": AttrDict(
info="clamped-ion elastic tensor in Voigt notation (shape: (6, 6))",
units="GPa"),
"elastic_relaxed": AttrDict(
info="relaxed-ion elastic tensor in Voigt notation (shape: (6, 6))",
units="GPa"),
"elastic_stress_corr": AttrDict(
info="relaxed-ion elastic tensor considering the stress left inside cell in Voigt notation (shape: (6, 6))",
units="GPa"),
"elastic_relaxed_fixed_D": AttrDict(
info="relaxed-ion elastic tensor at fixed displacement field in Voigt notation (shape: (6, 6))",
units="GPa"),
"piezo_clamped": AttrDict(
info="clamped-ion piezoelectric tensor in Voigt notation (shape: (3, 6))",
units="c/m^2"),
"piezo_relaxed": AttrDict(
info="relaxed-ion piezoelectric tensor in Voigt notation (shape: (3, 6))",
units="c/m^2"),
"d_piezo_relaxed": AttrDict(
info="relaxed-ion piezoelectric d tensor in Voigt notation (shape: (3, 6))",
units="pc/m^2"),
"g_piezo_relaxed": AttrDict(
info="relaxed-ion piezoelectric g tensor in Voigt notation (shape: (3, 6))",
units="m^2/c"),
"h_piezo_relaxed": AttrDict(
info="relaxed-ion piezoelectric h tensor in Voigt notation (shape: (3, 6))",
units="GN/c"),
}
def __init__(self, structure, params, elastic_clamped=None, elastic_relaxed=None, elastic_stress_corr=None,
elastic_relaxed_fixed_D=None, piezo_clamped=None, piezo_relaxed=None, d_piezo_relaxed=None,
g_piezo_relaxed=None, h_piezo_relaxed=None):
"""
Args:
structure: the structure
elastic_clamped: the values of a clamped-ion elastic tensor in Voigt notation (shape (6,3)) in GPa.
elastic_relaxed: the values of a relaxed-ion elastic tensor in Voigt notation (shape (6,3)) in GPa.
elastic_stress_corr: the values of a relaxed-ion elastic tensor considering the stress left inside cell
in Voigt notation (shape (6,3)) in GPa.
elastic_relaxed_fixed: the values of a relaxed-ion elastic tensor at fixed displacement field
in Voigt notation (shape (6,3)) in GPa.
piezo_clamped: the values of a clamped-ion piezoelectric tensor in Voigt notation (shape (6,3)) in c/m^2.
piezo_relaxed: the values of a relaxed-ion piezoelectric tensor in Voigt notation (shape (6,3)) in c/m^2.
d_piezo_relaxed: the values of a relaxed-ion piezoelectric d tensor in Voigt notation (shape (6,3)) in pc/m^2.
g_piezo_relaxed: the values of a relaxed-ion piezoelectric g tensor in Voigt notation (shape (6,3)) in m^2/c.
h_piezo_relaxed: the values of a relaxed-ion piezoelectric h tensor in Voigt notation (shape (6,3)) in GN/c.
structure: |Structure| object.
params: Dictionary with input parameters.
elastic_clamped: clamped-ion elastic tensor in Voigt notation (shape (6,6)) in GPa.
elastic_relaxed: relaxed-ion elastic tensor in Voigt notation (shape (6,6)) in GPa.
elastic_stress_corr: relaxed-ion elastic tensor considering the stress left inside cell
in Voigt notation (shape (6,6)) in GPa.
elastic_relaxed_fixed_D: relaxed-ion elastic tensor at fixed displacement field
in Voigt notation (shape (6,6)) in GPa.
piezo_clamped: clamped-ion piezoelectric tensor in Voigt notation (shape (3,6)) in C/m^2.
piezo_relaxed: relaxed-ion piezoelectric tensor in Voigt notation (shape (3,6)) in C/m^2.
d_piezo_relaxed: relaxed-ion piezoelectric d tensor in Voigt notation (shape (3,6)) in pC/m^2.
g_piezo_relaxed: relaxed-ion piezoelectric g tensor in Voigt notation (shape (3,6)) in m^2/C.
h_piezo_relaxed: relaxed-ion piezoelectric h tensor in Voigt notation (shape (3,6)) in GN/C.
.. note::
Arguments can be either arrays or Tensor objects.
"""
self._structure = structure
self.elastic_clamped = self._define_variable(elastic_clamped, ElasticTensor)
self.elastic_relaxed = self._define_variable(elastic_relaxed, ElasticTensor)
self.elastic_stress_corr = self._define_variable(elastic_stress_corr, ElasticTensor)
self.elastic_relaxed_fixed = self._define_variable(elastic_relaxed_fixed, ElasticTensor)
self.piezo_clamped = self._define_variable(piezo_clamped, PiezoTensor)
self.piezo_relaxed = self._define_variable(piezo_relaxed, PiezoTensor)
self.d_piezo_relaxed = self._define_variable(d_piezo_relaxed, PiezoTensor)
self.g_piezo_relaxed = self._define_variable(g_piezo_relaxed, Tensor)
self.h_piezo_relaxed = self._define_variable(h_piezo_relaxed, Tensor)
self.params = params
self.elastic_clamped = self._define_variable(elastic_clamped, MyElasticTensor)
self.elastic_relaxed = self._define_variable(elastic_relaxed, MyElasticTensor)
self.elastic_stress_corr = self._define_variable(elastic_stress_corr, MyElasticTensor)
self.elastic_relaxed_fixed_D = self._define_variable(elastic_relaxed_fixed_D, MyElasticTensor)
self.piezo_clamped = self._define_variable(piezo_clamped, MyPiezoTensor)
self.piezo_relaxed = self._define_variable(piezo_relaxed, MyPiezoTensor)
self.d_piezo_relaxed = self._define_variable(d_piezo_relaxed, MyPiezoTensor)
self.g_piezo_relaxed = self._define_variable(g_piezo_relaxed, MyPiezoTensor)
self.h_piezo_relaxed = self._define_variable(h_piezo_relaxed, MyPiezoTensor)
def _define_variable(self, tensor_voigt, tensor_class):
"""
Helper function to set values of a variable
"""
return tensor_class.from_voigt(tensor_voigt) if tensor_voigt is not None else None
if isinstance(tensor_voigt, Tensor):
if not isinstance(tensor_voigt, tensor_class):
raise TypeError("Expecting tensor class `%s`, received class `%s`" % (
tensor_class.__name__, tensor_voigt.__class__.__name__))
return tensor_voigt
else:
return tensor_class.from_voigt(tensor_voigt) if tensor_voigt is not None else None
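A self-contained sketch showing that the constructor also accepts plain Voigt arrays (hypothetical cubic Al-like structure and made-up numbers, just to exercise the API; a plain pymatgen Structure is enough here)::

    import numpy as np
    from pymatgen.core.lattice import Lattice
    from pymatgen.core.structure import Structure
    from abipy.dfpt.elastic import ElasticData

    structure = Structure(Lattice.cubic(4.05), ["Al"], [[0, 0, 0]])
    # Cubic elastic matrix in Voigt notation (GPa): C11=120, C12=60, C44=50 (made-up values).
    c_voigt = np.diag([120., 120., 120., 50., 50., 50.])
    c_voigt[:3, :3] += 60. * (1. - np.eye(3))
    edata = ElasticData(structure, params={}, elastic_relaxed=c_voigt)
    print(edata.elastic_relaxed.k_vrh, edata.elastic_relaxed.g_vrh)  # VRH bulk/shear moduli in GPa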
@property
def structure(self):
"""|Structure| object."""
return self._structure
@classmethod
@ -69,28 +176,258 @@ class ElasticData(Has_Structure):
"""
Builds the object from an ETSF_Reader.
"""
structure = reader.read_structure()
# [6, 6] symmetric tensors (written by Fortran)
elastic_clamped = reader.read_value("elastic_constants_clamped_ion", default=None)
elastic_relaxed = reader.read_value("elastic_constants_relaxed_ion", default=None)
elastic_stress_corr = reader.read_value("elastic_constants_relaxed_ion_stress_corrected",default=None)
elastic_relaxed_fixed = reader.read_value("elastic_tensor_relaxed_ion_fixed_D", default=None)
# [3, 6] tensors
piezo_clamped = reader.read_value("piezo_clamped_ion", default=None)
piezo_relaxed = reader.read_value("piezo_relaxed_ion", default=None)
d_piezo_relaxed = reader.read_value("d_tensor_relaxed_ion", default=None)
params = AttrDict(
# NB: asr and chneut are always present in the new anaddb.nc file
# Use -666 to support old formats.
asr=int(reader.read_value("asr", default=-666)),
chneut=int(reader.read_value("chneut", default=-666)),
elaflag=int(reader.read_value("elaflag", default=0)),
instrflag=int(reader.read_value("instrflag", default=0)),
piezoflag=int(reader.read_value("piezoflag", default=0)),
dieflag=int(reader.read_value("dieflag", default=0)),
)
# These are [6, 3] tensors written by Fortran (need to transpose)
g_piezo_relaxed = reader.read_value("g_tensor_relaxed_ion", default=None)
if g_piezo_relaxed is not None:
g_piezo_relaxed = g_piezo_relaxed.T.copy()
h_piezo_relaxed = reader.read_value("h_tensor_relaxed_ion", default=None)
if h_piezo_relaxed is not None:
h_piezo_relaxed = h_piezo_relaxed.T.copy()
ts = AttrDict({n: None for n in cls.ALL_TENSOR_NAMES})
return cls(structure=structure, elastic_clamped=elastic_clamped, elastic_relaxed=elastic_relaxed,
elastic_stress_corr=elastic_stress_corr, elastic_relaxed_fixed=elastic_relaxed_fixed,
piezo_clamped=piezo_clamped, piezo_relaxed=piezo_relaxed, d_piezo_relaxed=d_piezo_relaxed,
g_piezo_relaxed=g_piezo_relaxed, h_piezo_relaxed=h_piezo_relaxed)
# [6, 6] symmetric tensors (written by Fortran, produced in ddb_elast)
ts.elastic_clamped = reader.read_value("elastic_constants_clamped_ion", default=None)
ts.elastic_relaxed = reader.read_value("elastic_constants_relaxed_ion", default=None)
if params.elaflag == 5:
ts.elastic_stress_corr = reader.read_value("elastic_constants_relaxed_ion_stress_corrected")
# Written in ddb_piezo
ts.elastic_relaxed_fixed_D = reader.read_value("elastic_tensor_relaxed_ion_fixed_D", default=None)
# [3, 6] tensors (written by Fortran, produced in ddb_piezo).
ts.piezo_clamped = reader.read_value("piezo_clamped_ion", default=None)
ts.piezo_relaxed = reader.read_value("piezo_relaxed_ion", default=None)
ts.d_piezo_relaxed = reader.read_value("d_tensor_relaxed_ion", default=None)
# These are [6, 3] tensors written by Fortran (need to transpose).
ts.g_piezo_relaxed = reader.read_value("g_tensor_relaxed_ion", default=None)
if ts.g_piezo_relaxed is not None:
ts.g_piezo_relaxed = ts.g_piezo_relaxed.T.copy()
ts.h_piezo_relaxed = reader.read_value("h_tensor_relaxed_ion", default=None)
if ts.h_piezo_relaxed is not None:
ts.h_piezo_relaxed = ts.h_piezo_relaxed.T.copy()
return cls(structure, params, **ts)
#return cls(structure, params, elastic_clamped=elastic_clamped, elastic_relaxed=elastic_relaxed,
# elastic_stress_corr=elastic_stress_corr, elastic_relaxed_fixed_D=elastic_relaxed_fixed_D,
# piezo_clamped=piezo_clamped, piezo_relaxed=piezo_relaxed, d_piezo_relaxed=d_piezo_relaxed,
# g_piezo_relaxed=g_piezo_relaxed, h_piezo_relaxed=h_piezo_relaxed)
def __str__(self):
return self.to_string()
def to_string(self, verbose=0):
"""String represention with verbosity level `verbose`."""
lines = []; app = lines.append
app(self.structure.to_string(verbose=verbose, title="Structure"))
app("")
app(marquee("Parameters", mark="="))
import json
app(json.dumps(self.params, indent=2, sort_keys=True))
for tensor_type in ("elastic", "piezoelectric"):
name_tensor_list = self.name_tensor_list(tensor_type=tensor_type)
if name_tensor_list:
app("")
app(marquee("%s tensors available" % tensor_type, mark="="))
for name, tensor in name_tensor_list:
meta = self.TENSOR_META[name]
is_fit = tensor.is_fit_to_structure(self.structure, tol=1e-2)
# TODO
tol = dict(elastic=1e-3, piezoelectric=1e-5)[tensor_type]
app("[%s]" % name.upper())
app("%s" % meta.info)
app("Units: %s, set to zero below: %s, fit_to_structure: %s" % (meta.units, tol, is_fit))
app("")
if tensor_type == "elastic":
app(self.get_elastic_tensor_dataframe(tensor_name=name, tol=tol).to_string())
elif tensor_type == "piezoelectric":
app(self.get_piezoelectric_tensor_dataframe(tensor_name=name, tol=tol).to_string())
app("")
return "\n".join(lines)
def name_tensor_list(self, tensor_names=None, tensor_type="all"):
"""
List of (name, tensor) tuples. Only tensors stored in the object are returned.
Args:
tensor_names: List of tensor names to select. None means all.
tensor_type: Select tensors by type. Must be in ["all", "elastic", "piezoelectric"].
"""
l = []
if tensor_names is None:
for name in self.TYPE2NAMES[tensor_type]:
tensor = getattr(self, name)
if tensor is not None: l.append((name, tensor))
else:
for name in list_strings(tensor_names):
if name not in self.TYPE2NAMES[tensor_type]:
raise ValueError("tensor name %s does not belong to type: `%s`" % (name, tensor_type))
tensor = getattr(self, name)
if tensor is not None: l.append((name, tensor))
return l
def fit_to_structure(self, structure=None, symprec=0.1):
"""
Return new ElasticData object with tensors that are invariant with respect to symmetry
operations corresponding to `structure`.
Args:
structure (Structure): structure from which to generate symmetry operations
If None, the internal structure is used.
symprec (float): symmetry tolerance for the Spacegroup Analyzer
used to generate the symmetry operations
"""
structure = self.structure if structure is None else structure
kwargs = {name: tensor.fit_to_structure(structure, symprec=symprec)
for name, tensor in self.name_tensor_list()}
return self.__class__(structure, self.params, **kwargs)
def convert_to_ieee(self, structure=None, initial_fit=True, refine_rotation=True):
"""
Return new set of tensors in IEEE format according to the 1987 IEEE standards.
Args:
structure (Structure): a structure associated with the
tensor to be converted to the IEEE standard
If None, the internal structure is used
initial_fit (bool): flag to indicate whether initial
tensor is fit to the symmetry of the structure.
Defaults to true. Note that if false, inconsistent
results may be obtained due to symmetrically
equivalent, but distinct transformations
being used in different versions of spglib.
refine_rotation (bool): whether to refine the rotation
produced by the ieee transform generator, default True
"""
kwargs = {}
structure = self.structure if structure is None else structure
for name, tensor in self.name_tensor_list():
# TODO: one should use the ieee structure.
kwargs[name] = tensor.convert_to_ieee(structure,
initial_fit=initial_fit, refine_rotation=refine_rotation)
return self.__class__(structure, self.params, **kwargs)
def get_elastic_tensor_dataframe(self, tensor_name="elastic_relaxed", tol=1e-3):
"""
Args:
tensor_name: Name of the elastic tensor to tabulate.
tol: entries with absolute value below this threshold are set to zero in the returned matrix.
"""
tensor = getattr(self, tensor_name)
if tensor is None: return pd.DataFrame()
if tol: tensor = tensor.zeroed(tol)
columns = ["xx", "yy", "zz", "yz", "xz", "xy"]
#columns = ["1", "2", "3", "4", "5", "6"]
rows = []
for row in tensor.voigt:
rows.append({k: v for k, v in zip(columns, row)})
return pd.DataFrame(rows, index=columns, columns=columns)
def get_piezoelectric_tensor_dataframe(self, tensor_name="piezo_relaxed", tol=1e-5):
"""
Args:
tensor_name: Name of the piezoelectric tensor to tabulate.
tol: entries with absolute value below this threshold are set to zero in the returned matrix.
"""
tensor = getattr(self, tensor_name)
if tensor is None: return pd.DataFrame()
if tol: tensor = tensor.zeroed(tol)
index = ["Px", "Py", "Pz"]
columns = ["xx", "yy", "zz", "yz", "xz", "xy"]
#index = ["P1", "P2", "P3"]
#columns = ["1", "2", "3", "4", "5", "6"]
rows = []
for row in tensor.voigt:
rows.append({k: v for k, v in zip(columns, row)})
return pd.DataFrame(rows, index=index, columns=columns)
def get_voigt_dataframe(self, tensor_names):
"""
Return a |pandas-DataFrame| with Voigt indices as columns.
Useful to analyze the convergence of individual elements of the tensor(s).
Args:
tensor_names: List of tensor names.
"""
rows = []
for name, tensor in self.name_tensor_list(tensor_names=tensor_names):
voigt_map = tensor.get_voigt_dict(tensor.rank)
row = {}
for ind in voigt_map:
row[voigt_map[ind]] = tensor[ind]
row = OrderedDict(sorted(row.items(), key=lambda item: item[0]))
row["tensor_name"] = name
rows.append(row)
return pd.DataFrame(rows, columns=list(rows[0].keys()) if rows else None)
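Usage sketch (placeholder path; the column labels are the Voigt index pairs, e.g. (0, 0) is C11, as also checked in the unit tests)::

    from abipy.dfpt.anaddbnc import AnaddbNcFile

    with AnaddbNcFile("anaddb.nc") as anc:
        df = anc.elastic_data.get_voigt_dataframe(["elastic_relaxed", "elastic_clamped"])
        print(df[(0, 0)])     # C11 in GPa for each selected tensor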
#def get_average_elastic_dataframe(self, tensor_names="elastic_relaxed", fit_to_structure=False, symprec=0.1):
# """
# Return a |pandas-DataFrame|
# Args:
# tensor_names
# """
# schemes = ["voigt", "reuss", "vrh"]
# what_list = ["k", "g"] # "E", "v"] # TODO
# rows = []
# for name, tensor in self.name_tensor_list(tensor_names=tensor_names):
# if fit_to_structure:
# tensor = tensor.fit_to_structure(self.structure, symprec=symprec)
# for scheme in schemes:
# row = OrderedDict()
# row["scheme"] = scheme
# row["tensor_name"] = name
# for what in what_list:
# # k_vrh
# aname = what + "_" + scheme
# row[what] = getattr(tensor, aname)
# rows.append(row)
# return pd.DataFrame(rows, columns=list(rows[0].keys()) if rows else None)
def get_elast_properties_dataframe(self, tensor_names=["elastic_relaxed", "elastic_clamped"],
include_base_props=True, ignore_errors=False, fit_to_structure=False, symprec=0.1):
"""
Return a |pandas-DataFrame| with properties derived from the elastic tensor
and the associated structure.
Args:
tensor_names: List of tensor names to consider. Possible values: ["elastic_relaxed", "elastic_clamped", "elastic_stress_corr", "elastic_relaxed_fixed_D"].
include_base_props (bool): whether to include base properties, like k_vrh, etc.
ignore_errors (bool): if set to true, will set problem properties
that depend on a physical tensor to None, defaults to False
fit_to_structure (bool): If True, properties are computed with both the original tensors
and the symmetrized tensors. An additional column `fit_to_structure` is added to the dataframe.
symprec (float): symmetry tolerance for the Spacegroup Analyzer
used to generate the symmetry operations if `fit_to_structure`
"""
do_fits = [False] if not fit_to_structure else [True, False]
rows = []
for name, tensor in self.name_tensor_list(tensor_names=tensor_names):
for do_fit in do_fits:
if do_fit:
tensor = tensor.fit_to_structure(self.structure, symprec=symprec)
d = tensor.get_structure_property_dict(self.structure,
include_base_props=include_base_props, ignore_errors=ignore_errors)
d.pop("structure")
# Add column telling whether fit has been performed
if len(do_fits) > 1: d["fit_to_structure"] = do_fit
d["tensor_name"] = name
rows.append(d)
return pd.DataFrame(rows, columns=list(rows[0].keys()) if rows else None)

View File

@ -12,13 +12,15 @@ from abipy.dfpt.anaddbnc import AnaddbNcFile
class AnaddbNcFileTest(AbipyTest):
def test_base(self):
"""Base tests for AnaddbNcFile"""
"""Testing AnaddbNcFile API"""
anaddbnc_fname = abidata.ref_file("AlAs_nl_dte_anaddb.nc")
with AnaddbNcFile(anaddbnc_fname) as anc:
repr(anc); str(anc)
anc.to_string(verbose=2)
assert anc.structure.formula == "Al1 As1"
assert not anc.has_elastic_data
assert not anc.has_piezoelectric_data
assert anc.becs is not None
assert anc.emacro is not None
assert anc.emacro_rlx is not None
@ -26,7 +28,23 @@ class AnaddbNcFileTest(AbipyTest):
assert anc.dchide is not None
assert anc.oscillator_strength is not None
assert anc.ifc is None
assert not anc.params
assert anc.params["chneut"] == -666
if self.has_nbformat():
anc.write_notebook(nbpath=self.get_tmpname(text=True))
assert anc.write_notebook(nbpath=self.get_tmpname(text=True))
class AnaddbNcRobotTest(AbipyTest):
def test_base(self):
"""Testing AnaddbNcRobot API"""
from abipy import abilab
assert abilab.AnaddbNcRobot.class_handles_filename("anaddb.nc")
assert abilab.AnaddbNcRobot.class_handles_filename("/foo/bar/anaddb.nc")
robot = abilab.AnaddbNcRobot()
assert robot.EXT == "anaddb"
repr(robot); str(robot)
assert robot.to_string(verbose=2)
if self.has_nbformat():
assert robot.write_notebook(nbpath=self.get_tmpname(text=True))

View File

@ -4,6 +4,7 @@ from __future__ import print_function, division, unicode_literals, absolute_impo
import os
import numpy as np
import abipy.data as abidata
import abipy.core.abinit_units as abu
from abipy import abilab
from abipy.core.testing import AbipyTest
@ -36,6 +37,9 @@ class DdbTest(AbipyTest):
assert h.xred.shape == (h.natom, 3) and h.kpt.shape == (h.nkpt, 3)
self.assert_equal(h.znucl, [13, 33])
assert ddb.version == 100401
assert ddb.total_energy is None
assert ddb.cart_forces is None
assert ddb.cart_stress_tensor is None
assert np.all(h.symrel[1].T.ravel() == [0, -1, 1, 0, -1, 0, 1, -1, 0])
assert np.all(h.symrel[2].T.ravel() == [-1, 0, 0, -1, 0, 1, -1, 1, 0])
@ -77,6 +81,7 @@ class DdbTest(AbipyTest):
blocks = ddb._read_blocks()
assert len(blocks) == 1
assert blocks[0]["qpt"] == [0.25, 0, 0]
assert blocks[0]["dord"] == 2
lines = blocks[0]["data"]
assert lines[0].rstrip() == " 2nd derivatives (non-stat.) - # elements : 36"
@ -182,6 +187,7 @@ class DdbTest(AbipyTest):
# Execute anaddb to compute the interatomic forces.
ifc = ddb.anaget_ifc()
str(ifc); repr(ifc)
#assert ifc.to_string(verbose=2)
assert ifc.structure == ddb.structure
assert ifc.number_of_atoms == len(ddb.structure)
@ -324,6 +330,20 @@ class DdbTest(AbipyTest):
Testing DDB containing also third order derivatives.
"""
with abilab.abiopen(abidata.ref_file("refs/alas_nl_dfpt/AlAs_nl_dte_DDB")) as ddb:
repr(ddb); str(ddb)
assert ddb.to_string(verbose=2)
self.assert_almost_equal(ddb.total_energy.to("Ha"), -0.10085769246152e+02)
assert ddb.cart_forces is not None
stress = ddb.cart_stress_tensor
ref_voigt = np.array([-0.31110177329142E-05, -0.31110177329142E-05, -0.31110177329146E-05,
0.00000000000000E+00, 0.00000000000000E+00, 0.00000000000000E+00])
self.assert_almost_equal(stress[0, 0], ref_voigt[0] * abu.Ha_eV / abu.Bohr_Ang**3)
self.assert_almost_equal(stress[1, 1], ref_voigt[1] * abu.Ha_eV / abu.Bohr_Ang**3)
self.assert_almost_equal(stress[2, 2], ref_voigt[2] * abu.Ha_eV / abu.Bohr_Ang**3)
self.assert_almost_equal(stress[1, 2], ref_voigt[3] * abu.Ha_eV / abu.Bohr_Ang**3)
self.assert_almost_equal(stress[0, 2], ref_voigt[4] * abu.Ha_eV / abu.Bohr_Ang**3)
self.assert_almost_equal(stress[0, 1], ref_voigt[5] * abu.Ha_eV / abu.Bohr_Ang**3)
for qpoint in ddb.qpoints:
assert qpoint in ddb.computed_dynmat
@ -331,12 +351,32 @@ class DdbTest(AbipyTest):
"""
Testing DDB containing strain perturbations (elastic and piezoelectric tensors).
"""
self.skip_if_abinit_not_ge("8.9.3")
with abilab.abiopen(abidata.ref_file("refs/alas_elastic_dfpt/AlAs_elastic_DDB")) as ddb:
self.assertTrue(ddb.has_strain_terms())
self.assertFalse(ddb.has_strain_terms("all"))
assert ddb.has_strain_terms()
assert not ddb.has_strain_terms("all")
e = ddb.anaget_elastic(has_dde=True, has_gamma_ph=True, verbose=2)
self.assertEqual(e.elastic_relaxed[0,0,0,0], 120.41874336082199)
self.assertEqual(e.piezo_relaxed[0,1,2], -0.030391022487094244)
self.assert_almost_equal(e.elastic_relaxed[0,0,0,0], 120.41874336082199)
self.assert_almost_equal(e.piezo_relaxed[0,1,2], -0.030391022487094244)
assert repr(e); assert str(e)
assert e.to_string(verbose=2)
assert e.structure.formula == "Al1 As1"
assert e.elastic_relaxed._repr_html_()
#assert hasattr(e.elastic_relaxed.compliance_tensor, "_repr_html_")
name_tensor_list = e.name_tensor_list(tensor_type="elastic")
names = [nt[0] for nt in name_tensor_list]
assert "elastic_relaxed" in names
name_tensor_list = e.name_tensor_list(tensor_type="piezoelectric")
names = [nt[0] for nt in name_tensor_list]
assert "piezo_relaxed" in names
edata_fit = e.fit_to_structure()
edata_ieee = e.convert_to_ieee()
df = e.get_voigt_dataframe("elastic_relaxed")
self.assert_almost_equal(df[(0, 0)][0], 120.41874336082199)
df = e.get_elast_properties_dataframe(tensor_names="elastic_relaxed", fit_to_structure=True)
class DielectricTensorGeneratorTest(AbipyTest):
@ -388,7 +428,7 @@ class DdbRobotTest(AbipyTest):
assert r.phdos_plotter.gridplot(show=False)
if self.has_nbformat():
robot.write_notebook(nbpath=self.get_tmpname(text=True))
assert robot.write_notebook(nbpath=self.get_tmpname(text=True))
robot.close()
@ -400,8 +440,6 @@ class PhononComputationTest(AbipyTest):
path = os.path.join(abidata.dirpath, "refs", "mgb2_phonons_nkpt_tsmear", "mgb2_121212k_0.04tsmear_DDB")
ddb = abilab.abiopen(path)
#for dos_method in ("gaussian",):
#for dos_method in ("tetra",):
for dos_method in ("tetra", "gaussian"):
# Get phonon bands and Dos with anaddb.
phbands_file, phdos_file = ddb.anaget_phbst_and_phdos_files(nqsmall=4, ndivsm=2,

View File

@ -660,7 +660,9 @@ class ElectronBands(Has_Structure):
dist_tol: A point is considered to be on the path if its distance from the line
is less than dist_tol.
Return: namedtuple with:
Return:
namedtuple with the following attributes::
ebands: |ElectronBands| object.
ik_new2prev: Correspondence between the k-points in the new ebands and the kpoint
of the previous band structure (self).
@ -2417,7 +2419,7 @@ class ElectronBands(Has_Structure):
# Build new ebands object.
occfacts_kpath = np.zeros_like(eigens_kpath)
ebands_kpath = self.__class__(self.structure, kpath, eigens_kpath, self.fermie, occfacts_kpath,
self.nelect, self.nspinor, self.nspden)
self.nelect, self.nspinor, self.nspden, smearing=self.smearing)
ebands_kmesh = None
if kmesh is not None:
# Get kpts and weights in IBZ.
@ -2432,7 +2434,7 @@ class ElectronBands(Has_Structure):
occfacts_kmesh = np.zeros_like(eigens_kmesh)
ebands_kmesh = self.__class__(self.structure, kpts_kmesh, eigens_kmesh, self.fermie, occfacts_kmesh,
self.nelect, self.nspinor, self.nspden)
self.nelect, self.nspinor, self.nspden, smearing=self.smearing)
return dict2namedtuple(ebands_kpath=ebands_kpath, ebands_kmesh=ebands_kmesh, interpolator=skw)

View File

@ -1,34 +1,33 @@
#!/usr/bin/env python
r"""
Electrons and Phonons from the materials project website
=========================================================
========================================================
This example shows how to dowload the electronic band structure
and the DDB file using the mp identifier and use the AbiPy API
to generate a matplotlib grid with electrons + phonons.
IMPORTANT: Electrons and Phonons have been obtained with different codes
and different computational settings!
and different computational settings! Of course, one can always
initialize ElectronBands and PhononBands from local netcdf files
obtained with Abinit.
"""
from abipy import abilab
import abipy.data as abidata
import os
# List of mp ids for Si, Diamond
mpids = ["mp-149", "mp-66"]
# Get list of AbiPy ebands from mpids
# Get list of AbiPy ebands from mpids
ebands_list = [abilab.ElectronBands.from_mpid(mpid) for mpid in mpids]
# Get list of DDB files from the MP website and run anaddb to get the phonon bands.
phbands_list = []
for mpid in mpids:
print("Downloading DDB for mpid:", mpid)
for i, mpid in enumerate(mpids):
print("Downloading DDB for mpid %s (%s) ..." % (mpid, ebands_list[i].structure.formula))
ddb = abilab.DdbFile.from_mpid(mpid)
if ddb is None:
if ddb is None:
raise RuntimeError("%s does not provide DDB" % mpid)
print("Invoking anaddb...")
print("Invoking anaddb to compute phonon bands...")
phbst, _ = ddb.anaget_phbst_and_phdos_files(nqsmall=0)
phbands_list.append(phbst.phbands)
phbst.close()
@ -40,11 +39,16 @@ ax_mat, fig, plt = abilab.get_axarray_fig_plt(None, nrows=nrows, ncols=ncols,
sharex=False, sharey=False, squeeze=False)
# Use the `ax` keyword argument to select the matplotlib Axes used to plot the object.
# In the bans structure plot, we show the fundamental/direct gap as well as the possible
# phonon-absorption (-emission) processes allowed by energy-conservation
# In the band structure plot, we show the fundamental/direct gap as well as the possible
# phonon-absorption (-emission) processes allowed by energy-conservation.
# (This is a qualitative analysis of e-ph scattering, quasi-momentum and ph dispersion are not taken into account).
for i, (ebands, phbands) in enumerate(zip(ebands_list, phbands_list)):
ebands.plot(ax=ax_mat[i, 0], with_gaps=True, ylims=(-5, 10), max_phfreq=phbands.maxfreq, show=False)
phbands.plot(ax=ax_mat[i, 1], show=False)
# Hide xlabel if not last row.
if i != len(ebands_list) - 1:
for ax in ax_mat[i]:
ax.xaxis.label.set_visible(False)
plt.show()

View File

@ -90,6 +90,13 @@ TODO list:
* Had to increase fermie again to get correct gap in diamond treated with FD smearing
* XG remarks:
- Improve abipy/docs/README.rst mentions
- Change bibtex style (use same format as abidocs)
open _build/html/index.html
## Low priority
* Use parser subclass to avoid boiler plate code.

View File

@ -449,6 +449,13 @@ def abicomp_ddb(options):
return _invoke_robot(options)
def abicomp_anaddb(options):
"""
Compare multiple anaddb.nc files.
"""
return _invoke_robot(options)
def abicomp_phbst(options):
"""
Compare multiple PHBST.nc files.
@ -553,7 +560,8 @@ def _invoke_robot(options):
# To define an Help action
# http://stackoverflow.com/questions/20094215/argparse-subparser-monolithic-help-output?rq=1
paths = options.paths
#print(paths)
if options.verbose > 1:
print("In _invoke_robot with paths", paths)
if os.path.isdir(paths[0]):
# Assume directory.
@ -581,9 +589,6 @@ def _invoke_robot(options):
elif options.print or options.expose:
robot.trim_paths()
#df = robot.get_params_dataframe()
#abilab.print_dataframe(df, title="Output of robot.get_params_dataframe():")
# Print dataframe if robot provides get_dataframe method.
if hasattr(robot, "get_dataframe"):
try:
@ -739,6 +744,12 @@ Usage example:
abicomp.py phdos *_PHDOS.nc -nb => Compare phonon DOSes in the jupyter notebook.
abicomp.py ddb outdir1 outdir2 out_DDB -nb => Analyze all DDB files in directories outdir1, outdir2 and out_DDB file.
###############
# Anaddb netcdf
###############
abicomp anaddb tutorespfn_telast_2-telast_3/anaddb.nc
#########
# E-PH
#########
@ -956,6 +967,7 @@ def get_parser(with_epilog=False):
p_gsr = subparsers.add_parser('gsr', parents=robot_parents, help=abicomp_gsr.__doc__)
p_hist = subparsers.add_parser('hist', parents=robot_parents, help=abicomp_hist.__doc__)
p_ddb = subparsers.add_parser('ddb', parents=robot_parents, help=abicomp_ddb.__doc__)
p_anaddb = subparsers.add_parser('anaddb', parents=robot_parents, help=abicomp_anaddb.__doc__)
p_phbst = subparsers.add_parser('phbst', parents=robot_parents, help=abicomp_phbst.__doc__)
p_sigres = subparsers.add_parser('sigres', parents=robot_parents, help=abicomp_sigres.__doc__)
p_mdf = subparsers.add_parser('mdf', parents=robot_parents, help=abicomp_mdf.__doc__)

View File

@ -525,7 +525,7 @@ class AbiwanReader(ElectronsReader):
class AbiwanRobot(Robot, RobotWithEbands):
"""
This robot analyzes the results contained in multiple ABIWAN.nc_ files.
This robot analyzes the results contained in multiple ABIWAN.nc files.
.. rubric:: Inheritance Diagram
.. inheritance-diagram:: AbiwanRobot

View File

@ -46,6 +46,11 @@ Remember to issue::
before running ``make`` to activate the generation of the thumbnails in :file:`abipy/examples/flows`.
The documentation is produced in :file:`_build/html`.
Use::
open _build/html/index.html
to open the homepage in the browser.
You can run ``make help`` to see information on all possible make targets.
@ -148,11 +153,10 @@ Here are a few additional things to keep in mind:
The transforms have been completely revamped.
* The autodoc extension will handle index entries for the API, but additional
entries in the index_ need to be explicitly added.
entries in the index need to be explicitly added.
.. _documentation: http://sphinx.pocoo.org/contents.html
.. _`inline markup`: http://sphinx.pocoo.org/markup/inline.html
.. _index: http://sphinx.pocoo.org/markup/para.html#index-generating-markup
.. _documentation: http://www.sphinx-doc.org/en/master/
.. _`inline markup`: http://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html?highlight=inline#inline-markup
Docstrings
----------

View File

@ -112,4 +112,5 @@ Testing
Abipy has a testing infrastructure based on :mod:`unittest` and pytest_.
Common test support is provided by :mod:`abipy.core.testing`,
data files are storeed in :file:`abipy/tests/data`.
data files are stored in :file:`abipy/data`, in particular in :file:`abipy/data/refs` that
contains several output files that can be used for writing unit tests and examples.

View File

@ -559,6 +559,7 @@ Travis
mpi_runner: mpirun
pre_run:
- source activate test-environment
- ulimit -s unlimited
limits:
min_cores: 1
max_cores: 2