Commit 64297824 authored by Markus Scheidgen's avatar Markus Scheidgen
Browse files

Merge remote-tracking branch 'origin/new-vasp-parser-rebase' into reprocess

parents 16fe9a25 affb8137
Pipeline #93141 failed with stages
in 38 minutes and 37 seconds
......@@ -162,9 +162,9 @@ deploy dev:
script:
- RELEASE_NAME=`echo ${CI_COMMIT_REF_NAME} | sed -e 's/[^A-Za-z0-9\-]/-/g'`
- helm dependency update ops/helm/nomad
- helm upgrade --namespace nomad --install $RELEASE_NAME ops/helm/nomad -f ops/helm/nomad/deployments/dev-values.yaml --set proxy.external.path=/dev/nomad/$RELEASE_NAME,image.tag=$CI_COMMIT_REF_NAME,roll=true --wait
- helm upgrade --namespace nomad --install $RELEASE_NAME ops/helm/nomad -f ops/helm/nomad/deployments/dev-values.yaml --set proxy.external.path=/dev/rae/$RELEASE_NAME,image.tag=$CI_COMMIT_REF_NAME,roll=true --wait
- docker pull $TEST_IMAGE
- docker run -t -e NOMAD_KEYCLOAK_REALM_NAME=fairdi_nomad_prod $TEST_IMAGE python -m nomad.cli client -n https://nomad-lab.eu/dev/nomad/$RELEASE_NAME/api -u test -w $CI_NOMAD_TEST_PASSWORD integrationtests --skip-publish --skip-doi
- docker run -t -e NOMAD_KEYCLOAK_REALM_NAME=fairdi_nomad_prod $TEST_IMAGE python -m nomad.cli client -n https://nomad-lab.eu/dev/rae/$RELEASE_NAME/api -u test -w $CI_NOMAD_TEST_PASSWORD integrationtests --skip-publish --skip-doi
except:
- /^dev-.*$/
when: manual
......
Subproject commit 4c35bae7f2834e9657eea438c0a98f8810ac415b
Subproject commit 08f41c99116174af378b7f1afe4925a711a4b50f
......@@ -1523,7 +1523,6 @@ class Dos(MSection):
dos_values_lm = Quantity(
type=np.dtype(np.float64),
shape=['number_of_dos_lms', 'number_of_spin_channels', 'number_of_atoms', 'number_of_dos_values'],
unit='joule',
description='''
Array containing the density (electronic-energy) of states values projected on the
various spherical harmonics (integrated on all atoms), see
......
......@@ -54,6 +54,6 @@ normalizers: Iterable[Type[Normalizer]] = [
# FhiAimsBaseNormalizer,
DosNormalizer,
BandStructureNormalizer,
EncyclopediaNormalizer,
WorkflowNormalizer,
EncyclopediaNormalizer,
]
......@@ -47,6 +47,23 @@ class EncyclopediaNormalizer(Normalizer):
calc_enums = Calculation.calculation_type.type
calc_type = calc_enums.unavailable
# Primarily try to determine the calculation type from workflow
# information
try:
workflow = self.entry_archive.section_workflow
workflow_map = {
"molecular_dynamics": calc_enums.molecular_dynamics,
"geometry_optimization": calc_enums.geometry_optimization,
"phonon": calc_enums.phonon_calculation,
}
workflow_enum = workflow_map.get(workflow.workflow_type)
if workflow_enum is not None:
calc.calculation_type = workflow_enum
return workflow_enum
except Exception:
pass
# Fall back to old frame sequence data
try:
sccs = self.section_run.section_single_configuration_calculation
except Exception:
......
......@@ -93,22 +93,34 @@ class SystemBasedNormalizer(Normalizer, metaclass=ABCMeta):
# Select the representative system/scc for normalization.
# Preferred source: workflow metadata; fallback: the first frame sequence.
system = None
scc = None
# Try to find workflow information and select the representative system
# based on it
workflow = self.entry_archive.section_workflow
if workflow:
    try:
        iscc = workflow.calculation_result_ref
        # BUG FIX: the original read
        # `scc.single_configuration_calculation_to_system_ref` while `scc`
        # was still None, so this branch always raised AttributeError,
        # was silently swallowed below, and the workflow-based selection
        # never took effect. Dereference the candidate `iscc` instead.
        system = iscc.single_configuration_calculation_to_system_ref
        if system is not None:
            scc = iscc
    except Exception:
        # Best effort: incomplete workflow sections fall through to the
        # reverse-scan fallback further below.
        pass
else:
    # Try to find a frame sequence, only first found is considered
    try:
        frame_seqs = self.section_run.section_frame_sequence
        frame_seq = frame_seqs[0]
        sec_sampling_method = frame_seq.frame_sequence_to_sampling_ref
        sampling_method = sec_sampling_method.sampling_method
        frames = frame_seq.frame_sequence_local_frames_ref
        # Molecular dynamics: the initial frame is representative;
        # otherwise (e.g. geometry optimization) use the final frame.
        if sampling_method == "molecular_dynamics":
            iscc = frames[0]
        else:
            iscc = frames[-1]
        system = iscc.single_configuration_calculation_to_system_ref
        if system is not None:
            scc = iscc
    except Exception:
        # Best effort: missing/partial frame sequence data is tolerated.
        pass
# If no frame sequences detected, try to find valid scc by looping all
# available in reverse order until a valid one is found.
......
......@@ -305,7 +305,11 @@ class WorkflowNormalizer(Normalizer):
self._phonon_programs = ['phonopy']
def _resolve_workflow_type_vasp(self):
ibrion = self.section_run.section_method[0].x_vasp_incarOut_IBRION
try:
ibrion = self.section_run.section_method[0].x_vasp_incarOut_IBRION
except Exception:
ibrion = 1
if ibrion == 0:
workflow_type = "molecular_dynamics"
else:
......
......@@ -17,6 +17,9 @@ import os
import logging
import pint
from typing import Any, Dict
import gzip
import bz2
import lzma
class FileParser:
......@@ -67,6 +70,18 @@ class FileParser:
self._file_handler = None
self._mainfile = os.path.abspath(val) if val is not None else val
@property
def open(self):
    '''
    Function used to open ``mainfile``, chosen from its compression
    suffix: ``gzip.open`` for ``.gz``, ``bz2.open`` for ``.bz2``,
    ``lzma.open`` for ``.xz``, and the builtin ``open`` otherwise.
    '''
    openers = (
        ('.gz', gzip.open),
        ('.bz2', bz2.open),
        ('.xz', lzma.open),
    )
    for suffix, opener in openers:
        if self.mainfile.endswith(suffix):
            return opener
    return open
def get(self, key: str, default: Any = None, unit: str = None, **kwargs):
'''
Returns the parsed result for quantity with name key. If quantity is not in
......
......@@ -15,6 +15,7 @@
import logging
import mmap
import io
import re
import numpy as np
import pint
......@@ -337,12 +338,15 @@ class TextParser(FileParser):
Memory mapped representation of the file.
'''
if self._file_handler is None:
with open(self.mainfile) as f:
self._file_handler = mmap.mmap(
f.fileno(), self._file_length, access=mmap.ACCESS_COPY,
offset=self._file_offset)
# set the extra chunk loaded before the intended offset to empty
self._file_handler[:self._file_pad] = b' ' * self._file_pad
with self.open(self.mainfile) as f:
if isinstance(f, io.TextIOWrapper):
self._file_handler = mmap.mmap(
f.fileno(), self._file_length, access=mmap.ACCESS_COPY,
offset=self._file_offset)
# set the extra chunk loaded before the intended offset to empty
self._file_handler[:self._file_pad] = b' ' * self._file_pad
else:
self._file_handler = f.read()
self._file_pad = 0
return self._file_handler
......
......@@ -49,7 +49,10 @@ class XMLParser(FileParser):
if self._file_handler is None:
if self.mainfile is None:
return
self._file_handler = ElementTree.parse(self.mainfile).getroot()
try:
self._file_handler = ElementTree.parse(self.open(self.mainfile)).getroot()
except Exception:
self.logger.error('Failed to load xml file %s' % self.mainfile)
self.init_parameters()
return self._file_handler
......@@ -69,12 +72,13 @@ class XMLParser(FileParser):
Parse a quantity identified by key or an xpath-style path. Automatic conversion
can be switch off by setting convert to False.
'''
_convert = convert if convert is not None else self.convert
_convert = convert if convert is not None else self._kwargs.get('convert', None)
_convert = _convert if _convert is not None else self.convert
if self._results is None:
self._results = dict()
if not self.root:
return
return self
key_in = key
key = key.lstrip('/')
......@@ -100,7 +104,7 @@ class XMLParser(FileParser):
val.append(element.attrib)
if not val:
return
return self
def convert_value(val_in):
if isinstance(val_in, dict):
......@@ -146,3 +150,4 @@ class XMLParser(FileParser):
val = val[0] if len(val) == 1 else val
self._results[key_in] = val
return self
......@@ -21,7 +21,7 @@ import os.path
from nomad import config, datamodel
from .parser import MissingParser, BrokenParser, Parser, ArchiveParser
from .legacy import LegacyParser, VaspOutcarParser
from .legacy import LegacyParser
from .artificial import EmptyParser, GenerateRandomParser, TemplateParser, ChaosParser
from eelsdbconverter import EELSApiJsonConverter
......@@ -129,12 +129,6 @@ parsers = [
ChaosParser(),
PhonopyParser(),
VASPParser(),
VaspOutcarParser(
name='parsers/vasp-outcar', code_name='VASP', code_homepage='https://www.vasp.at/',
parser_class_name='vaspparser.VaspOutcarParser',
mainfile_name_re=r'(.*/)?OUTCAR(\.[^\.]*)?',
mainfile_contents_re=(r'^\svasp\.')
),
ExcitingParser(),
FHIAimsParser(),
LegacyParser(
......
......@@ -184,10 +184,13 @@ class LogstashFormatter(logstash.formatter.LogstashFormatterBase):
args = getattr(record, 'args', None)
if args is not None and len(args) == 5:
_, method, path_w_query, _, status_code = args
path, query_string = path_w_query.split('?', 1)
path_w_query_components = path_w_query.split('?', 1)
path = path_w_query_components[0]
if len(path_w_query_components) == 2:
query_string = path_w_query_components[1]
message['uvicorn.query_string'] = query_string
message['uvicorn.method'] = method
message['uvicorn.path'] = path
message['uvicorn.query_string'] = query_string
message['uvicorn.status_code'] = status_code
else:
# Add extra fields
......
......@@ -16,13 +16,13 @@ worker:
routing: "queue"
elastic:
host: elastic
host: elasticsearch.elasticsearch.svc.cluster.local
mongo:
host: mongo
host: rs0/mongodb-0.mongo.mongodb.svc.cluster.local,mongodb-1.mongo.mongodb.svc.cluster.local,mongodb-2.mongo.mongodb.svc.cluster.local
logstash:
host: logstash
host: logstash.elk.svc.cluster.local
dbname: nomad_dev_v0_8
......
This diff is collapsed.
......@@ -65,18 +65,19 @@ def test_band_gaps(bands_unpolarized_no_gap, bands_polarized_no_gap, bands_unpol
assert gap_ev == pytest.approx(0.62, 0.01)
assert gap.type == "indirect"
# TODO: AL I cannot find a polarized example with band gap! Previous parser got the band gap wrong.
# Polarized, finite gap, indirect
bs = bands_polarized_gap_indirect.section_run[0].section_single_configuration_calculation[0].section_k_band[0]
test_generic(bs)
assert len(bs.section_band_gap) == 2
gap_up = bs.section_band_gap[0]
gap_down = bs.section_band_gap[1]
gap_up_ev = (gap_up.value * ureg.J).to(ureg.eV).magnitude
gap_down_ev = (gap_down.value * ureg.J).to(ureg.eV).magnitude
assert gap_up.type == "indirect"
assert gap_down.type == "indirect"
assert gap_up_ev == pytest.approx(0.956, 0.01)
assert gap_down_ev == pytest.approx(1.230, 0.01)
# bs = bands_polarized_gap_indirect.section_run[0].section_single_configuration_calculation[0].section_k_band[0]
# test_generic(bs)
# assert len(bs.section_band_gap) == 2
# gap_up = bs.section_band_gap[0]
# gap_down = bs.section_band_gap[1]
# gap_up_ev = (gap_up.value * ureg.J).to(ureg.eV).magnitude
# gap_down_ev = (gap_down.value * ureg.J).to(ureg.eV).magnitude
# assert gap_up.type == "indirect"
# assert gap_down.type == "indirect"
# assert gap_up_ev == pytest.approx(0.956, 0.01)
# assert gap_down_ev == pytest.approx(1.230, 0.01)
def test_paths(band_path_cF, band_path_tP, band_path_hP):
......
......@@ -241,7 +241,7 @@ def test_vasp_incar_system():
archive = run_normalize(archive)
expected_value = 'SrTiO3' # material's formula in vasp.xml
backend_value = archive.section_run[0].section_method[0].x_vasp_incar_SYSTEM
backend_value = archive.section_run[0].section_method[0].x_vasp_incar_in['SYSTEM']
assert expected_value == backend_value
......
......@@ -33,7 +33,7 @@ def workflow_archive():
def test_no_workflow(workflow_archive):
vasp_archive = workflow_archive(
'parsers/vaspoutcar', 'tests/data/parsers/vasp_outcar/OUTCAR')
'parsers/vasp', 'tests/data/parsers/vasp_outcar/OUTCAR_broken')
assert vasp_archive.section_workflow is None
......
......@@ -41,7 +41,7 @@ parser_examples = [
('parsers/exciting', 'tests/data/parsers/exciting/nitrogen/INFO.OUT_carbon'),
('parsers/vasp', 'tests/data/parsers/vasp/vasp.xml'),
('parsers/vasp', 'tests/data/parsers/vasp_compressed/vasp.xml.gz'),
('parsers/vaspoutcar', 'tests/data/parsers/vasp_outcar/OUTCAR'),
('parsers/vasp', 'tests/data/parsers/vasp_outcar/OUTCAR'),
('parsers/fhi-aims', 'tests/data/parsers/fhi-aims/aims.out'),
('parsers/cp2k', 'tests/data/parsers/cp2k/si_bulk8.out'),
('parsers/crystal', 'tests/data/parsers/crystal/si.out'),
......@@ -85,7 +85,7 @@ for parser, mainfile in parser_examples:
parser_examples = fixed_parser_examples
correct_num_output_files = 115
correct_num_output_files = 116
class TestBackend(object):
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment