Commit 07116308 authored by Mohamed, Fawzi Roberto (fawzi)'s avatar Mohamed, Fawzi Roberto (fawzi)
Browse files

meta 2.0 changes

parent bf2a0ea6
......@@ -84,6 +84,9 @@ QE_SMEARING_KIND = {
'tetrahedron method': 'tetrahedra',
}
def metaN(metaName):
    """Return the normalized (meta 2.0) form of a meta name.

    Dots are replaced with underscores and the result is lower-cased,
    e.g. "section_XC_functionals" -> "section_xc_functionals".

    :param metaName: the meta-info name to normalize
    :returns: the normalized name as a str
    """
    # fixes docstring typo ("Retrurns"); behavior unchanged
    return metaName.replace(".", "_").lower()
class ParserQuantumEspresso(object):
"""Base class for all Quantum Espresso parsers"""
......@@ -259,9 +262,9 @@ class ParserQuantumEspresso(object):
def addDict(self, backend, this_dict):
    """Add every (key, value) pair of this_dict to the backend.

    Keys are emitted in sorted order for deterministic output, and each
    key is normalized via metaN (meta 2.0 naming) before being handed
    to the backend.
    """
    for key, value in sorted(this_dict.items()):
        # NOTE(review): the diff residue showed both the old
        # addValue(key, value) and the new normalized call; emitting
        # both would store each value twice, so only the
        # metaN-normalized call (the commit's final state) is kept.
        backend.addValue(metaN(key), value)
def addSectionDict(self, backend, section_name, section_dict):
    """Open a section, write section_dict's values into it, and close it.

    The section name is normalized via metaN (meta 2.0 naming) both
    when opening and when closing, so the two backend calls always
    refer to the same section identifier.
    """
    # NOTE(review): the diff residue contained both the old
    # (un-normalized) and new (metaN-normalized) openSection /
    # closeSection lines; only the normalized final state is kept,
    # otherwise the section would be opened and closed twice under
    # mismatching names.
    gIndex = backend.openSection(metaN(section_name))
    self.addDict(backend, section_dict)
    backend.closeSection(metaN(section_name), gIndex)
......@@ -25,6 +25,7 @@ import nomadcore.unit_conversion.unit_conversion as unit_conversion
import math
import numpy as np
import QuantumEspressoCommon as QeC
from QuantumEspressoCommon import metaN
from nomadcore.parser_backend import valueForStrValue
from QuantumEspressoCommon import RE_f, RE_i, cRE_f, cRE_i
from QuantumEspressoXC import translate_qe_xc_num
......@@ -124,7 +125,7 @@ class QuantumEspressoParserPWSCF(QeC.ParserQuantumEspresso):
target = re.sub(r'^x_qe_t_',r'x_qe_',key)
if target == key:
raise Exception('found non-temporary key in pseudopotential cache: "%s"' % (key))
backend.addValue(target, value[-1])
backend.addValue(metaN(target), value[-1])
if pp['x_qe_t_pp_idx'] is not None:
pp_num = pp['x_qe_t_pp_idx'][-1]
pp_report = self.cache_t_pp_report.get(pp_num, None)
......@@ -154,7 +155,7 @@ class QuantumEspressoParserPWSCF(QeC.ParserQuantumEspresso):
dft_d = cache_dft_d.get(pp_label, None)
if dft_d is not None:
for k, v in dft_d.items():
backend.addValue(k, v)
backend.addValue(metaN(k), v)
def onClose_x_qe_t_section_pp_report(
self, backend, gIndex, section):
......@@ -214,15 +215,15 @@ class QuantumEspressoParserPWSCF(QeC.ParserQuantumEspresso):
else:
LOGGER.error("x_qe_xc_functional_num is not set")
if method_xc_functionals is not None:
# NOTE: value of XC_functional generated by translate_qe_xc_num
# NOTE: value of xc_functional generated by translate_qe_xc_num
# does not fully respect the metaInfo definition
# when XC_functional_parameters are involved.
# when xc_functional_parameters are involved.
# Therefore, remove it here
method_xc_functionals.pop('XC_functional', None)
method_xc_functionals.pop('xc_functional', None)
self.addDict(backend, method_xc_functionals)
if xc_functionals is not None:
for xc_functional in xc_functionals:
self.addSectionDict(backend, 'section_XC_functionals', xc_functional)
self.addSectionDict(backend, 'section_xc_functionals', xc_functional)
else:
LOGGER.error("error getting xc_functionals")
if section['x_qe_t_allocated_array_name'] is not None:
......@@ -304,7 +305,7 @@ class QuantumEspressoParserPWSCF(QeC.ParserQuantumEspresso):
self, backend, gIndex, section):
"""trigger called when section_single_configuration_calculation
is closed"""
backend.addValue('single_configuration_to_calculation_method_ref', self.sectionIdx['section_method'])
backend.addValue('single_configuration_calculation_to_method_ref', self.sectionIdx['section_method'])
backend.addValue('single_configuration_calculation_to_system_ref', self.sectionIdx['section_system'])
# extract k band structure data if available
self.create_section_eigenvalues(backend, section)
......@@ -315,10 +316,13 @@ class QuantumEspressoParserPWSCF(QeC.ParserQuantumEspresso):
backend.addArrayValues('x_qe_energy_decomposition_value', np.asarray(
section['x_qe_t_energy_decomposition_value']))
if section['x_qe_t_force_x'] is not None:
# constraints etc. not part of the reported forces, so correct metaInfo is 'atom_forces_raw'
backend.addArrayValues('atom_forces_raw', np.array([
sectFId = backend.openSection("section_atom_forces")
# constraints etc. not part of the reported forces
backend.addValue('atom_forces_constraints', 'raw')
backend.addArrayValues('atom_forces', np.array([
section['x_qe_t_force_x'], section['x_qe_t_force_y'], section['x_qe_t_force_z']
]).T)
backend.closeSection("section_atom_forces", sectFId)
if section['x_qe_t_dispersion_force_x'] is not None:
backend.addArrayValues('x_qe_atom_dispersion_force', np.array([
section['x_qe_t_dispersion_force_x'], section['x_qe_t_dispersion_force_y'], section['x_qe_t_dispersion_force_z']
......@@ -385,7 +389,7 @@ class QuantumEspressoParserPWSCF(QeC.ParserQuantumEspresso):
new_system['x_qe_t_k_info_ik'] = section['x_qe_t_md_k_info_ik']
for target, data in new_system.items():
for val in data:
backend.addValue(target, val)
backend.addValue(metaN(target), val)
backend.closeSection('section_system', next_system_gIndex)
def onClose_section_scf_iteration(
......@@ -622,22 +626,22 @@ class QuantumEspressoParserPWSCF(QeC.ParserQuantumEspresso):
LOGGER.debug("No K-point weight info found in output")
if section['x_qe_t_dense_FFT_grid_x'] is not None:
backend.addArrayValues('x_qe_dense_FFT_grid', np.array([
section['x_qe_t_dense_FFT_grid_x'], section['x_qe_t_dense_FFT_grid_y'], section['x_qe_t_dense_FFT_grid_z']
if section['x_qe_t_dense_fft_grid_x'] is not None:
backend.addArrayValues('x_qe_dense_fft_grid', np.array([
section['x_qe_t_dense_fft_grid_x'], section['x_qe_t_dense_fft_grid_y'], section['x_qe_t_dense_fft_grid_z']
]).T)
elif old_system is not None:
# unless espresso explicitly writes new FFT grid info, sampling is kept fixed
backend.addArrayValues('x_qe_dense_FFT_grid', old_system['x_qe_dense_FFT_grid'][-1])
backend.addArrayValues('x_qe_dense_fft_grid', old_system['x_qe_dense_fft_grid'][-1])
else:
LOGGER.warning("No dense FFT grid info found in output")
if section['x_qe_t_smooth_FFT_grid_x'] is not None:
backend.addArrayValues('x_qe_smooth_FFT_grid', np.array([
section['x_qe_t_smooth_FFT_grid_x'], section['x_qe_t_smooth_FFT_grid_y'], section['x_qe_t_smooth_FFT_grid_z']
if section['x_qe_t_smooth_fft_grid_x'] is not None:
backend.addArrayValues('x_qe_smooth_fft_grid', np.array([
section['x_qe_t_smooth_fft_grid_x'], section['x_qe_t_smooth_fft_grid_y'], section['x_qe_t_smooth_fft_grid_z']
]).T)
elif old_system is not None and old_system['x_qe_smooth_FFT_grid'] is not None:
backend.addArrayValues('x_qe_smooth_FFT_grid', old_system['x_qe_smooth_FFT_grid'][-1])
elif old_system is not None and old_system['x_qe_smooth_fft_grid'] is not None:
backend.addArrayValues('x_qe_smooth_fft_grid', old_system['x_qe_smooth_fft_grid'][-1])
if section['x_qe_t_vec_supercell_x'] is not None:
backend.addArrayValues('x_qe_vec_supercell', np.array([
......@@ -676,14 +680,14 @@ class QuantumEspressoParserPWSCF(QeC.ParserQuantumEspresso):
self.tmp.pop('x_qe_t_profile_category', None)
# manually open header sections, closed at the beginning of scf
for sec in self.header_sections():
gIndex = backend.openSection(sec)
gIndex = backend.openSection(metaN(sec))
self.openSectionIdx[sec] = gIndex
def adHoc_final_scf_MD(self, parser):
"""final SCF calculation in VC-relax runs needs open header sections"""
# manually open header sections, closed at the beginning of scf
for sec in self.header_sections():
gIndex = parser.backend.openSection(sec)
gIndex = parser.backend.openSection(metaN(sec))
self.openSectionIdx[sec] = gIndex
def onClose_section_run(
......@@ -710,8 +714,8 @@ class QuantumEspressoParserPWSCF(QeC.ParserQuantumEspresso):
backend.addValue('sampling_method', QE_MD_RELAX_SAMPLING_METHOD[self.tmp['md_relax']])
backend.closeSection('section_sampling_method', sampling_method_gIndex)
frame_sequence_gIndex = backend.openSection('section_frame_sequence')
backend.addValue('frame_sequence_to_sampling_ref', sampling_method_gIndex)
backend.addArrayValues('frame_sequence_local_frames_ref', np.array(self.tmp['frames']))
backend.addValue('frame_sequence_to_sampling_method_ref', sampling_method_gIndex)
backend.addArrayValues('frame_sequence_to_frames_ref', np.array(self.tmp['frames']))
backend.closeSection('section_frame_sequence', frame_sequence_gIndex)
def appendToTmp(self, tmpname, value):
......@@ -732,7 +736,7 @@ class QuantumEspressoParserPWSCF(QeC.ParserQuantumEspresso):
self.tmp['dispersion_correction'] = {}
self.tmp['dispersion_correction'][parser.lastMatch['x_qe_t_species_dispersion_correction_label']] = {
'x_qe_dispersion_correction_vdw_radius': parser.lastMatch['x_qe_t_species_dispersion_correction_vdw_radius'],
'x_qe_dispersion_correction_C6': parser.lastMatch['x_qe_t_species_dispersion_correction_C6'],
'x_qe_dispersion_correction_c6': parser.lastMatch['x_qe_t_species_dispersion_correction_c6'],
}
def adHoc_alat(self, parser):
......@@ -1039,22 +1043,22 @@ class QuantumEspressoParserPWSCF(QeC.ParserQuantumEspresso):
),
SM(name='dense_grid',
startReStr=(r"\s*Dense\s+grid:\s*(?P<x_qe_dense_g_vectors>\d+)\s*G-vectors\s*FFT\s+dimensions:\s*\(\s*" +
QeC.re_vec("x_qe_t_dense_FFT_grid", split=r"\s*,\s*") + "\s*\)\s*$")
QeC.re_vec("x_qe_t_dense_fft_grid", split=r"\s*,\s*") + "\s*\)\s*$")
),
SM(name='dense_grid_old',
startReStr=(r"\s*G\s+cutoff\s*=\s*(?P<x_qe_dense_g_cutoff>" + RE_f + r")\s*" +
r"\(\s*(?P<x_qe_dense_g_vectors>\d+)\s*G-vectors\s*\)\s*FFT\s+grid:\s*\(\s*" +
QeC.re_vec("x_qe_t_dense_FFT_grid", split=r"\s*,\s*") + "\s*\)\s*$"
QeC.re_vec("x_qe_t_dense_fft_grid", split=r"\s*,\s*") + "\s*\)\s*$"
),
),
SM(name='smooth_grid',
startReStr=(r"\s*Smooth\s+grid:\s*(?P<x_qe_smooth_g_vectors>\d+)\s*G-vectors\s*FFT\s+dimensions:\s*\(\s*" +
QeC.re_vec("x_qe_t_smooth_FFT_grid", split=r"\s*,\s*") + "\s*\)\s*$")
QeC.re_vec("x_qe_t_smooth_fft_grid", split=r"\s*,\s*") + "\s*\)\s*$")
),
SM(name='smooth_grid_old',
startReStr=(r"\s*G\s+cutoff\s*=\s*(?P<x_qe_smooth_g_cutoff>" + RE_f + r")\s*" +
r"\(\s*(?P<x_qe_smooth_g_vectors>\d+)\s*G-vectors\s*\)\s*smooth\s+grid:\s*\(\s*" +
QeC.re_vec("x_qe_t_smooth_FFT_grid", split=r"\s*,\s*") + "\s*\)\s*$"
QeC.re_vec("x_qe_t_smooth_fft_grid", split=r"\s*,\s*") + "\s*\)\s*$"
),
),
SM(name='core_charge_realspace',
......@@ -1556,7 +1560,7 @@ class QuantumEspressoParserPWSCF(QeC.ParserQuantumEspresso):
for sec in self.header_sections():
sec_gIndex = self.openSectionIdx.pop(sec,None)
if sec_gIndex is not None:
backend.closeSection(sec, sec_gIndex)
backend.closeSection(metaN(sec), sec_gIndex)
def run_submatchers(self):
"""submatchers of section_run"""
......@@ -1730,7 +1734,7 @@ class QuantumEspressoParserPWSCF(QeC.ParserQuantumEspresso):
SM(name='dispersion_correction_values', repeats=True,
startReStr=(r"\s*(?P<x_qe_t_species_dispersion_correction_label>.+?)\s+" +
r"(?P<x_qe_t_species_dispersion_correction_vdw_radius>" + RE_f + r")" +
r"\s*(?P<x_qe_t_species_dispersion_correction_C6>" + RE_f + r")\s*$"),
r"\s*(?P<x_qe_t_species_dispersion_correction_c6>" + RE_f + r")\s*$"),
adHoc=self.adHoc_dispersion_correction_values,
),
],
......@@ -1850,12 +1854,12 @@ class QuantumEspressoParserPWSCF(QeC.ParserQuantumEspresso):
subMatchers=[
SM(name='sticks_sum', required=True,
startReStr=(
r"\s*Sum\s+(?P<x_qe_sticks_sum_dense>\d+)\s+(?P<x_qe_sticks_sum_smooth>\d+)\s+(?P<x_qe_sticks_sum_PW>\d+)" +
r"\s+(?P<x_qe_sticks_sum_G_dense>\d+)\s+(?P<x_qe_sticks_sum_G_smooth>\d+)\s+(?P<x_qe_sticks_sum_G_PW>\d+)\s*$"
r"\s*Sum\s+(?P<x_qe_sticks_sum_dense>\d+)\s+(?P<x_qe_sticks_sum_smooth>\d+)\s+(?P<x_qe_sticks_sum_pw>\d+)" +
r"\s+(?P<x_qe_sticks_sum_g_dense>\d+)\s+(?P<x_qe_sticks_sum_g_smooth>\d+)\s+(?P<x_qe_sticks_sum_g_pw>\d+)\s*$"
),
),
SM(name='sticks_tot',
startReStr=r"\s*Tot\s+(?P<x_qe_sticks_tot_dense>\d+)\s+(?P<x_qe_sticks_tot_smooth>\d+)\s+(?P<x_qe_sticks_tot_PW>\d+)\s*$",
startReStr=r"\s*Tot\s+(?P<x_qe_sticks_tot_dense>\d+)\s+(?P<x_qe_sticks_tot_smooth>\d+)\s+(?P<x_qe_sticks_tot_pw>\d+)\s*$",
),
],
),
......
......@@ -48,7 +48,7 @@ object QuantumEspressoParser extends SimpleExternalParserGenerator(
"parser-quantum-espresso/setup_paths.py",
"nomad_meta_info/public.nomadmetainfo.json",
"nomad_meta_info/common.nomadmetainfo.json",
"nomad_meta_info/meta_types.nomadmetainfo.json",
"nomad_meta_info/meta.nomadmetainfo.json",
"nomad_meta_info/quantum_espresso.nomadmetainfo.json"
) ++ DefaultPythonInterpreter.commonFiles(),
dirMap = Map(
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment