Commit b5b981f7 authored by Pardini, Lorenzo (lopa)

wien2k input file

parent 4f71b2f9
# Copyright 2017-2018 Lorenzo Pardini
from builtins import object

import setup_paths
from nomadcore.simple_parser import mainFunction, CachingLevel
from nomadcore.simple_parser import SimpleMatcher as SM
from nomadcore.local_meta_info import loadJsonFile, InfoKindEl
import os, sys, json, logging
import numpy as np
import ase.geometry

################################################################
# This is the subparser for the main WIEN2k input file (.struct)
################################################################
# Copyright 2016-2018 Daria M. Tomecka, Fawzi Mohamed
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# ...
# See the License for the specific language governing permissions and
# limitations under the License.

__author__ = "Daria M. Tomecka"
__maintainer__ = "Daria M. Tomecka"
__email__ = "tomeckadm@gmail.com;"
__date__ = "15/05/2017"

########### Modified for elastic by Lorenzo Pardini ##################
class Wien2kStructContext(object):
    """context for wien2k struct parser"""

    def __init__(self):
        self.parser = None

    def initialize_values(self):
        """allows to reset values if the same superContext is used to parse different files"""
        pass

    def startedParsing(self, path, parser):
        """called when parsing starts"""
        self.parser = parser
        # allows to reset values if the same superContext is used to parse different files
        self.initialize_values()

    def onClose_section_system(self, backend, gIndex, section):
        # unit_cell
        unit_cell_params = []
        for i in ['a', 'b', 'c']:
            uci = section['x_elastic_wien2k_unit_cell_param_' + i]
            # if uci is not None:
            unit_cell_params.append(uci[0])
        for i in ['alfa', 'beta', 'gamma']:
            uci = section['x_elastic_wien2k_angle_between_unit_axis_' + i]
            # if uci is not None:
            unit_cell_params.append(uci[0])
        unit_cell = ase.geometry.cellpar_to_cell(unit_cell_params)
        backend.addArrayValues('lattice_vectors', unit_cell)
        # backend.addArrayValues("configuration_periodic_dimensions", np.ones(3, dtype=bool))

        equiv_atoms = section["x_elastic_wien2k_section_equiv_atoms"]
        # logging.error("section: %s", section)
        labels = []
        pos = []
        for eqAtoms in equiv_atoms:
            label = eqAtoms["x_elastic_wien2k_atom_name"][0]
            x = eqAtoms["x_elastic_wien2k_atom_pos_x"]
            y = eqAtoms["x_elastic_wien2k_atom_pos_y"]
            z = eqAtoms["x_elastic_wien2k_atom_pos_z"]
            # logging.error("equiv_atoms: %s x %s y %s z %s", eqAtoms, x, y, z)
            if len(x) != len(y) or len(x) != len(z):
                raise Exception("incorrect parsing, different number of x,y,z components")
            groupPos = [[x[i], y[i], z[i]] for i in range(len(x))]
            nAt = len(groupPos)
            labels += [label for i in range(nAt)]
            pos += groupPos
        backend.addValue("atom_labels", labels)
        backend.addArrayValues('atom_positions', np.asarray(pos))
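
# --- Illustrative sketch, not part of the original commit -------------------
# onClose_section_system feeds the six cell parameters [a, b, c, alfa, beta,
# gamma] collected by the matchers below into ase.geometry.cellpar_to_cell,
# which returns the 3x3 matrix of lattice vectors stored as 'lattice_vectors'.
# The helper shows that conversion in isolation; the numbers are made-up values
# for a hypothetical tetragonal cell, not taken from any parsed file.
def _demo_cellpar_to_cell():
    cellpar = [3.0, 3.0, 5.0, 90.0, 90.0, 90.0]   # a, b, c lengths; angles in degrees
    cell = ase.geometry.cellpar_to_cell(cellpar)  # rows are the a, b, c lattice vectors
    return cell                                   # e.g. cell[2] is approximately [0, 0, 5]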
# description of the input
def buildStructureMatchers():
    return SM(
        name = 'root',
        weak = True,
        startReStr = "",
        sections = ["section_run", "section_system"],
        subMatchers = [
            SM(name = 'systemName',
               startReStr = r"(?P<x_elastic_wien2k_system_nameIn>.*)"),
            SM(r"\w+\s*LATTICE,NONEQUIV\.ATOMS.\s*(?P<x_elastic_wien2k_nonequiv_atoms>[0-9]+)"),
            SM(r"(?P<x_elastic_wien2k_calc_mode>.*)"),
            # SM(r"\s*(?P<x_wien2k_unit_cell_param_a>[-+0-9.eEdD]+)\s*(?P<x_wien2k_unit_cell_param_b>[-+0-9.eEdD]+)\s*(?P<x_wien2k_unit_cell_param_c>[-+0-9.eEdD]+)\s*(?P<x_wien2k_angle_between_unit_axis_alfa>[-+0-9.eEdD]{9})\s*(?P<x_wien2k_angle_between_unit_axis_beta>[-+0-9.eEdD]{9})\s*(?P<x_wien2k_angle_between_unit_axis_gamma>[-+0-9.eEdD]+)"),
            SM(r"\s*(?P<x_elastic_wien2k_unit_cell_param_a__angstrom>[-+0-9]*\.\d{0,6}){0,10}\s*(?P<x_elastic_wien2k_unit_cell_param_b__angstrom>[-+0-9]*\.\d{0,6}){0,10}\s*(?P<x_elastic_wien2k_unit_cell_param_c__angstrom>[-+0-9]*\.\d{0,6}){0,10}\s*(?P<x_elastic_wien2k_angle_between_unit_axis_alfa>[-+]?[0-9]*\.\d{0,6}){0,10}\s*(?P<x_elastic_wien2k_angle_between_unit_axis_beta>[-+]?[0-9]*\.\d{0,6}){0,10}\s*(?P<x_elastic_wien2k_angle_between_unit_axis_gamma>[-+]?[0-9]*\.\d*)"),
            SM(r"\s*ATOM\s*[-0-9]+:\s*X=(?P<x_elastic_wien2k_atom_pos_x__angstrom>[-+0-9.eEdD]+)\s*Y=(?P<x_elastic_wien2k_atom_pos_y__angstrom>[-+0-9.eEdD]+)\s*Z=(?P<x_elastic_wien2k_atom_pos_z__angstrom>[-+0-9.eEdD]+)",
               repeats=True,
               sections=["x_elastic_wien2k_section_equiv_atoms"],
               subMatchers=[
                   SM(r"\s*[-0-9]+:\s*X=(?P<x_elastic_wien2k_atom_pos_x__angstrom>[-+0-9.eEdD]+)\s*Y=(?P<x_elastic_wien2k_atom_pos_y__angstrom>[-+0-9.eEdD]+)\s*Z=(?P<x_elastic_wien2k_atom_pos_z__angstrom>[-+0-9.eEdD]+)",
                      repeats=True
                      ),
                   # SM(r"\s*(?P<atom>.{10})\s*NPT=\s*(?P<NPT>[0-9]+)\s*R0=(?P<r0>[0-9.]+)\s*RMT=\s*(?P<rmt>[0-9.]+)\s*Z:\s*(?P<z>[0-9.]+)",)
                   SM(r"\s*(?P<x_elastic_wien2k_atom_name>^.+)\s*NPT=\s*(?P<x_elastic_wien2k_NPT>[0-9]+)\s*R0=(?P<x_elastic_wien2k_R0>[0-9.]+)\s*RMT=\s*(?P<x_elastic_wien2k_RMT>[0-9.]+)\s*Z:\s*(?P<x_elastic_wien2k_atomic_number_Z>[0-9.]+)",)
               ]
               )
        ])
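
# --- Illustrative sketch, not part of the original commit -------------------
# The matcher hierarchy above expects a WIEN2k-style structure summary: a title
# line, a "LATTICE,NONEQUIV.ATOMS" line, a calculation-mode line, one line with
# the six cell parameters, and then blocks of "ATOM ...: X=... Y=... Z=..."
# lines followed by the species line with NPT/R0/RMT/Z. The helper below applies
# the atom-position regular expression from the SM above to a made-up example
# line, independently of the nomadcore machinery, to show which named groups it
# captures.
def _demo_atom_position_regex():
    import re
    pattern = re.compile(
        r"\s*ATOM\s*[-0-9]+:\s*X=(?P<x_elastic_wien2k_atom_pos_x__angstrom>[-+0-9.eEdD]+)"
        r"\s*Y=(?P<x_elastic_wien2k_atom_pos_y__angstrom>[-+0-9.eEdD]+)"
        r"\s*Z=(?P<x_elastic_wien2k_atom_pos_z__angstrom>[-+0-9.eEdD]+)")
    line = "ATOM  -1: X=0.00000000 Y=0.00000000 Z=0.00000000"   # hypothetical input line
    match = pattern.match(line)
    return match.groupdict()   # {'x_elastic_wien2k_atom_pos_x__angstrom': '0.00000000', ...}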
def get_cachingLevelForMetaName(metaInfoEnv, CachingLvl):
    """Sets the caching level for the metadata.

    Args:
        metaInfoEnv: metadata which is an object of the class InfoKindEnv in nomadcore.local_meta_info.py.
        CachingLvl: Sets the CachingLevel for the sections section_run and section_system.
            This allows to run the parser without opening new sections.

    Returns:
        Dictionary with metaname as key and caching level as value.
    """
    # manually adjust caching of metadata
    cachingLevelForMetaName = {
        'section_run': CachingLvl,
        'section_system': CachingLvl
    }
    cachingLevelForMetaName["x_elastic_wien2k_system_nameIn"] = CachingLevel.ForwardAndCache
    cachingLevelForMetaName["x_elastic_wien2k_section_equiv_atoms"] = CachingLevel.ForwardAndCache
    cachingLevelForMetaName["atom_labels"] = CachingLevel.ForwardAndCache
    return cachingLevelForMetaName
# loading metadata from nomad-meta-info/meta_info/nomad_meta_info/fhi_aims.nomadmetainfo.json
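
# --- Illustrative sketch, not part of the original commit -------------------
# This file only provides the matchers, the superContext and the caching table;
# the commit does not show how they are driven. In sibling NOMAD parsers the
# pieces are typically wired together roughly as below. The metainfo path, the
# parserInfo dictionary and the function name _demo_main are assumptions made
# for illustration, not code from this repository.
def _demo_main(cachingLevel=CachingLevel.Forward):
    # hypothetical metainfo file; the elastic parser would load its own
    # .nomadmetainfo.json rather than the fhi_aims one mentioned above
    metaInfoPath = os.path.normpath(os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        "../../../../nomad-meta-info/meta_info/nomad_meta_info/elastic.nomadmetainfo.json"))
    metaInfoEnv, warnings = loadJsonFile(filePath=metaInfoPath,
                                         dependencyLoader=None,
                                         extraArgsHandling=InfoKindEl.ADD_EXTRA_ARGS,
                                         uri=None)
    mainFunction(mainFileDescription=buildStructureMatchers(),
                 metaInfoEnv=metaInfoEnv,
                 parserInfo={'name': 'elastic-wien2k-input-parser', 'version': '1.0'},  # assumed
                 cachingLevelForMetaName=get_cachingLevelForMetaName(metaInfoEnv, cachingLevel),
                 superContext=Wien2kStructContext())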