Commit 873efcae authored by Markus Scheidgen

Merge branch 'v0.10.0' into 'master'

Merge v0.10.0 into master for release

Closes #475, #484, #497, #492, #498, and #500

See merge request !283
parents 5636e115 015742db
Pipeline #95934 passed with stage in 2 minutes and 28 seconds
@@ -65,18 +65,19 @@ def test_band_gaps(bands_unpolarized_no_gap, bands_polarized_no_gap, bands_unpol
assert gap_ev == pytest.approx(0.62, 0.01)
assert gap.type == "indirect"
# TODO: AL I cannot find a polarized example with band gap! Previous parser got the band gap wrong.
# Polarized, finite gap, indirect
bs = bands_polarized_gap_indirect.section_run[0].section_single_configuration_calculation[0].section_k_band[0]
test_generic(bs)
assert len(bs.section_band_gap) == 2
gap_up = bs.section_band_gap[0]
gap_down = bs.section_band_gap[1]
gap_up_ev = (gap_up.value * ureg.J).to(ureg.eV).magnitude
gap_down_ev = (gap_down.value * ureg.J).to(ureg.eV).magnitude
assert gap_up.type == "indirect"
assert gap_down.type == "indirect"
assert gap_up_ev == pytest.approx(0.956, 0.01)
assert gap_down_ev == pytest.approx(1.230, 0.01)
# bs = bands_polarized_gap_indirect.section_run[0].section_single_configuration_calculation[0].section_k_band[0]
# test_generic(bs)
# assert len(bs.section_band_gap) == 2
# gap_up = bs.section_band_gap[0]
# gap_down = bs.section_band_gap[1]
# gap_up_ev = (gap_up.value * ureg.J).to(ureg.eV).magnitude
# gap_down_ev = (gap_down.value * ureg.J).to(ureg.eV).magnitude
# assert gap_up.type == "indirect"
# assert gap_down.type == "indirect"
# assert gap_up_ev == pytest.approx(0.956, 0.01)
# assert gap_down_ev == pytest.approx(1.230, 0.01)
def test_paths(band_path_cF, band_path_tP, band_path_hP):
......
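The band-gap assertions in the hunk above all follow the same pattern: take the SI value stored in the archive, convert it to electron volts with pint, and compare against a reference. A self-contained sketch of that conversion, with a hypothetical input value chosen to land at the 0.62 eV expected by the test:

    import pint

    ureg = pint.UnitRegistry()

    gap_si = 9.93e-20  # hypothetical band gap in SI units (J), roughly 0.62 eV
    gap_ev = (gap_si * ureg.J).to(ureg.eV).magnitude
    assert abs(gap_ev - 0.62) < 0.01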
#
# Copyright The NOMAD Authors.
#
# This file is part of NOMAD. See https://nomad-lab.eu for further info.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from nomad.normalizing import optimade
@pytest.mark.parametrize('formula, expected', [
('NaClHC', 'CHClNa'), ('NaClH2', 'ClH2Na')
])
def test_chemical_formula_hill(formula, expected):
assert optimade.optimade_chemical_formula_hill(formula) == expected
@pytest.mark.parametrize('formula, expected', [
('Na3Cl2H', 'A3B2C'), ('NaNaNaClClHH', 'A3B2C2')
])
def test_chemical_formula_anonymous(formula, expected):
assert optimade.optimade_chemical_formula_anonymous(formula) == expected
@pytest.mark.parametrize('formula, expected', [
('Na3Cl2H', 'Cl2HNa3'), ('NaNaNaClClHH', 'Cl2H2Na3')
])
def test_chemical_formula_reduced(formula, expected):
assert optimade.optimade_chemical_formula_reduced(formula) == expected
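The three parametrized tests above pin down the formula conventions used by the OPTIMADE normalizer: Hill order puts carbon first and hydrogen second when carbon is present and falls back to plain alphabetical order otherwise; reduced formulas are alphabetical with counts; anonymous formulas substitute A, B, C, ... for the elements by descending count. An illustrative re-implementation of the Hill rule for simple SymbolCount strings without parentheses (not the optimade module's actual code):

    import re
    from collections import Counter

    def hill_formula(formula: str) -> str:
        # tally element symbols, honoring explicit multipliers like 'H2'
        counts: Counter = Counter()
        for symbol, count in re.findall(r'([A-Z][a-z]?)(\d*)', formula):
            counts[symbol] += int(count) if count else 1
        if 'C' in counts:
            symbols = ['C'] + (['H'] if 'H' in counts else []) + sorted(
                s for s in counts if s not in ('C', 'H'))
        else:
            symbols = sorted(counts)
        return ''.join(
            s + (str(counts[s]) if counts[s] > 1 else '') for s in symbols)

    assert hill_formula('NaClHC') == 'CHClNa'
    assert hill_formula('NaClH2') == 'ClH2Na'
    assert hill_formula('NaNaNaClClHH') == 'Cl2H2Na3'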
@@ -20,7 +20,7 @@ import ase.build
from nomad import datamodel, config
from nomad.datamodel import EntryArchive
from nomad.app import dump_json
from nomad.app.flask import dump_json
from nomad.datamodel.metainfo.public import section_springer_material as SpringerMaterial
from tests.parsing.test_parsing import parsed_vasp_example # pylint: disable=unused-import
@@ -241,7 +241,7 @@ def test_vasp_incar_system():
archive = run_normalize(archive)
expected_value = 'SrTiO3' # material's formula in vasp.xml
backend_value = archive.section_run[0].section_method[0].x_vasp_incar_SYSTEM
backend_value = archive.section_run[0].section_method[0].x_vasp_incar_in['SYSTEM']
assert expected_value == backend_value
......
@@ -33,7 +33,7 @@ def workflow_archive():
def test_no_workflow(workflow_archive):
vasp_archive = workflow_archive(
'parsers/vaspoutcar', 'tests/data/parsers/vasp_outcar/OUTCAR')
'parsers/vasp', 'tests/data/parsers/vasp_outcar/OUTCAR_broken')
assert vasp_archive.section_workflow is None
......
@@ -2,12 +2,12 @@ import pytest
import numpy as np
import pint
from nomad.parsing.file_parser import UnstructuredTextFileParser, Quantity, ParsePattern,\
from nomad.parsing.file_parser import TextParser, Quantity, ParsePattern,\
XMLParser
from nomad.datamodel.metainfo.public import section_system
class TestUnstructuredTextFileParser:
class TestTextParser:
@pytest.fixture(scope='class')
def mainfile(self):
return 'tests/data/parsers/exciting/Ag/INFO.OUT'
@@ -40,7 +40,7 @@ class TestUnstructuredTextFileParser:
@pytest.fixture(scope='class')
def quantity_repeats(self):
return dict(
quantity=Quantity('total_energy', r'Total energy\s*:\s*([\d\.\-]+)'),
quantity=Quantity('total_energy', r'Total energy\s*:\s*([\d\.\-]+)', repeats=True),
value=np.array([
-5307.34855605, -5313.90710687, -5315.97055490, -5316.38701749,
-5317.59994092, -5317.26163104, -5317.26791647, -5317.26750374,
@@ -50,14 +50,15 @@ class TestUnstructuredTextFileParser:
@pytest.fixture(scope='class')
def quantity_with_unit(self):
return dict(
quantity=Quantity('wall_time', r'Wall time \((?P<__unit>\w+)\)\s*:\s*([\d\.]+)'),
quantity=Quantity(
'wall_time', r'Wall time \((?P<__unit>\w+)\)\s*:\s*([\d\.]+)', repeats=True),
value=[pint.Quantity(v, 'seconds') for v in [
3.55, 5.32, 7.09, 8.84, 10.58, 12.33, 14.09, 15.84, 17.58,
19.33, 21.09, 22.91]])
@pytest.fixture(scope='function')
def parser(self, mainfile):
return UnstructuredTextFileParser(mainfile=mainfile)
return TextParser(mainfile=mainfile)
def test_mainfile_setter(self, parser, mainfile2):
parser.quantities = [Quantity(
@@ -67,7 +68,7 @@ class TestUnstructuredTextFileParser:
assert parser.get('time') == '08:24:03'
def test_constructor(self, mainfile, quantity_string):
class TestParser(UnstructuredTextFileParser):
class TestParser(TextParser):
def __init__(self, **kwargs):
super().__init__(**kwargs)
@@ -114,21 +115,21 @@ class TestUnstructuredTextFileParser:
def test_quantity_sub_parser(self, parser):
quantity_species = Quantity(
'species', r'cies :\s*([\s\S]+?)(?:Spe|Total)', repeats=True,
sub_parser=UnstructuredTextFileParser(quantities=[
sub_parser=TextParser(quantities=[
Quantity('name', r'name\s*:\s*(\w+)', repeats=False),
Quantity('mass', r'atomic mass\s*:\s*([\d\.]+)', repeats=False)]))
quantity_initialization = Quantity(
'initialization',
r'Starting initialization([\s\S]+?)Ending initialization', repeats=False,
sub_parser=UnstructuredTextFileParser(quantities=[
sub_parser=TextParser(quantities=[
Quantity('k_point_grid', r'k\-point grid\s*:\s*([\d ]+)', repeats=False),
quantity_species]))
quantity_scf = Quantity(
'scf', r'Self\-consistent loop started([\s\S]+?)Self\-consistent loop stopped',
repeats=True, sub_parser=UnstructuredTextFileParser(quantities=[
Quantity('iteration', r'SCF iteration number\s*:\s*(\d+)')]))
repeats=True, sub_parser=TextParser(quantities=[
Quantity('iteration', r'SCF iteration number\s*:\s*(\d+)', repeats=True)]))
parser.quantities = [
quantity_initialization, quantity_scf, Quantity(
@@ -151,7 +152,7 @@ class TestUnstructuredTextFileParser:
def test_block_short(self, parser, quantity_repeats):
parser.quantities = [Quantity(
'scf', r'SCF iteration number\s*:\s*\d+([\s\S]+?)Wall time',
repeats=True, sub_parser=UnstructuredTextFileParser(quantities=[
repeats=True, sub_parser=TextParser(quantities=[
quantity_repeats.get('quantity')]))]
scf = parser.get('scf')
......
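The hunks above track two API changes in nomad.parsing.file_parser: UnstructuredTextFileParser is renamed to TextParser, and quantities that should collect every match now require an explicit repeats=True rather than repeating by default. A condensed usage sketch, with the regex patterns and mainfile path taken from the tests:

    from nomad.parsing.file_parser import TextParser, Quantity

    parser = TextParser(
        mainfile='tests/data/parsers/exciting/Ag/INFO.OUT',
        quantities=[
            Quantity('total_energy', r'Total energy\s*:\s*([\d\.\-]+)', repeats=True),
            Quantity(
                'wall_time', r'Wall time \((?P<__unit>\w+)\)\s*:\s*([\d\.]+)',
                repeats=True)])

    energies = parser.get('total_energy')  # all matches, because repeats=True
    wall_times = parser.get('wall_time')   # pint quantities via the __unit group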
@@ -27,7 +27,7 @@ from nomad import utils, files, datamodel
from nomad.datamodel import EntryArchive, EntryMetadata
from nomad.parsing import BrokenParser, Backend
from nomad.parsing.parsers import parser_dict, match_parser
from nomad.app import dump_json
from nomad.app.flask import dump_json
parser_examples = [
('parsers/random', 'test/data/parsers/random_0'),
@@ -41,7 +41,7 @@ parser_examples = [
('parsers/exciting', 'tests/data/parsers/exciting/nitrogen/INFO.OUT_carbon'),
('parsers/vasp', 'tests/data/parsers/vasp/vasp.xml'),
('parsers/vasp', 'tests/data/parsers/vasp_compressed/vasp.xml.gz'),
('parsers/vaspoutcar', 'tests/data/parsers/vasp_outcar/OUTCAR'),
('parsers/vasp', 'tests/data/parsers/vasp_outcar/OUTCAR'),
('parsers/fhi-aims', 'tests/data/parsers/fhi-aims/aims.out'),
('parsers/cp2k', 'tests/data/parsers/cp2k/si_bulk8.out'),
('parsers/crystal', 'tests/data/parsers/crystal/si.out'),
@@ -85,7 +85,7 @@ for parser, mainfile in parser_examples:
parser_examples = fixed_parser_examples
correct_num_output_files = 115
correct_num_output_files = 116
class TestBackend(object):
......
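The updated example table and output-file count reflect the retirement of the standalone OUTCAR parser: OUTCAR mainfiles are now matched by the same 'parsers/vasp' entry that handles vasprun.xml. A small check along those lines, assuming the 'parsers/vaspoutcar' key was dropped from parser_dict entirely:

    from nomad.parsing.parsers import parser_dict

    assert 'parsers/vaspoutcar' not in parser_dict  # assumption: removed outright
    vasp_parser = parser_dict['parsers/vasp']       # now covers vasp.xml and OUTCAR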
@@ -31,6 +31,8 @@ from nomad.processing.base import task as task_decorator, FAILURE, SUCCESS
from tests.test_search import assert_search_upload
from tests.test_files import assert_upload_files
from tests.app.flask.conftest import client, oasis_central_nomad_client, session_client # pylint: disable=unused-import
from tests.app.conftest import other_test_user_auth, test_user_auth # pylint: disable=unused-import
def test_send_mail(mails, monkeypatch):
@@ -119,6 +121,7 @@ def assert_processing(upload: Upload, published: bool = False):
# check some domain metadata
assert entry_metadata.n_atoms > 0
assert len(entry_metadata.atoms) > 0
assert len(entry_metadata.processing_errors) == 0
assert upload.get_calc(calc.calc_id) is not None
@@ -530,6 +533,8 @@ def test_task_failure(monkeypatch, uploaded, task, proc_infra, test_user, with_e
assert 'section_metadata' in calc_archive
assert calc_archive['section_metadata']['dft']['code_name'] not in [
config.services.unavailable_value, config.services.not_processed_value]
if task != 'cleanup':
assert len(calc_archive['section_metadata']['processing_errors']) > 0
assert 'processing_logs' in calc_archive
if task != 'parsing':
assert 'section_run' in calc_archive
......
@@ -427,3 +427,7 @@ def test_compute_required_incomplete(archive):
})
assert required is not None
def test_compute_required_full():
assert compute_required_with_referenced('*') is None
@@ -21,7 +21,6 @@ import pytest
import click.testing
import json
import datetime
import zipfile
import time
from nomad import search, processing as proc, files
@@ -29,7 +28,10 @@ from nomad.cli import cli
from nomad.cli.cli import POPO
from nomad.processing import Upload, Calc
from tests.app.test_app import BlueprintClient
from tests.app.flask.test_app import BlueprintClient
from tests.app.flask.conftest import ( # pylint: disable=unused-import
test_user_bravado_client, client, session_client, admin_user_bravado_client) # pylint: disable=unused-import
from tests.app.conftest import test_user_auth, admin_user_auth # pylint: disable=unused-import
# TODO there is much more to test
@@ -66,13 +68,6 @@ class TestAdmin:
cli, ['admin', 'reset'], catch_exceptions=False)
assert result.exit_code == 1
# def test_remove(self, reset_infra):
# result = click.testing.CliRunner().invoke(
# cli, ['admin', 'reset', '--remove', '--i-am-really-sure'], catch_exceptions=False)
# assert result.exit_code == 0
# # allow other test to re-establish a connection
# mongoengine.disconnect_all()
def test_clean(self, published):
upload_id = published.upload_id
@@ -117,21 +112,6 @@ class TestAdmin:
with files.UploadFiles.get(upload_id=upload_id).read_archive(calc_id=calc.calc_id) as archive:
assert calc.calc_id in archive
def test_index(self, published):
upload_id = published.upload_id
calc = Calc.objects(upload_id=upload_id).first()
calc.metadata['comment'] = 'specific'
calc.save()
assert search.SearchRequest().search_parameter('comment', 'specific').execute()['total'] == 0
result = click.testing.CliRunner().invoke(
cli, ['admin', 'index', '--threads', '2'], catch_exceptions=False)
assert result.exit_code == 0
assert 'index' in result.stdout
assert search.SearchRequest().search_parameter('comment', 'specific').execute()['total'] == 1
def test_delete_entry(self, published):
upload_id = published.upload_id
calc = Calc.objects(upload_id=upload_id).first()
@@ -145,6 +125,11 @@ class TestAdmin:
assert Calc.objects(calc_id=calc.calc_id).first() is None
def transform_for_index_test(calc):
calc.comment = 'specific'
return calc
@pytest.mark.usefixtures('reset_config', 'no_warn')
class TestAdminUploads:
@@ -203,24 +188,6 @@ class TestAdminUploads:
assert Upload.objects(upload_id=upload_id).first() is None
assert Calc.objects(upload_id=upload_id).first() is None
def test_msgpack(self, published):
upload_id = published.upload_id
upload_files = files.UploadFiles.get(upload_id=upload_id)
for access in ['public', 'restricted']:
zip_path = upload_files._file_object('archive', access, 'json', 'zip').os_path
with zipfile.ZipFile(zip_path, mode='w') as zf:
for i in range(0, 2):
with zf.open('%d_%s.json' % (i, access), 'w') as f:
f.write(json.dumps(dict(archive='test')).encode())
result = click.testing.CliRunner().invoke(
cli, ['admin', 'uploads', 'msgpack', upload_id], catch_exceptions=False)
assert result.exit_code == 0
assert 'wrote msgpack archive' in result.stdout
with upload_files.read_archive('0_public') as archive:
assert archive['0_public'].to_dict() == dict(archive='test')
def test_index(self, published):
upload_id = published.upload_id
calc = Calc.objects(upload_id=upload_id).first()
@@ -236,6 +203,21 @@ class TestAdminUploads:
assert search.SearchRequest().search_parameters(comment='specific').execute()['total'] == 1
def test_index_with_transform(self, published):
upload_id = published.upload_id
assert search.SearchRequest().search_parameters(comment='specific').execute()['total'] == 0
result = click.testing.CliRunner().invoke(
cli, [
'admin', 'uploads', 'index',
'--transformer', 'tests.test_cli.transform_for_index_test',
upload_id],
catch_exceptions=False)
assert result.exit_code == 0
assert 'index' in result.stdout
assert search.SearchRequest().search_parameters(comment='specific').execute()['total'] == 1
def test_re_process(self, published, monkeypatch):
monkeypatch.setattr('nomad.config.meta.version', 'test_version')
upload_id = published.upload_id
......
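The deleted msgpack/index tests above are superseded by a transformer-based re-index flow: 'admin uploads index --transformer' receives a dotted path to a function that takes each entry and returns the transformed version before indexing. A sketch of that invocation; the module path and upload id are hypothetical placeholders:

    import click.testing
    from nomad.cli import cli

    def set_comment(calc):
        # transformer contract from the test above: mutate and return the entry
        calc.comment = 'specific'
        return calc

    result = click.testing.CliRunner().invoke(
        cli, [
            'admin', 'uploads', 'index',
            '--transformer', 'my_module.set_comment',  # hypothetical dotted path
            'some_upload_id'],                         # hypothetical upload id
        catch_exceptions=False)
    assert result.exit_code == 0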
@@ -23,7 +23,9 @@ from nomad.metainfo import MSection, SubSection
from nomad.datamodel import EntryArchive, User
from nomad.datamodel.metainfo.public import section_run
from tests.app.test_app import BlueprintClient
from tests.app.flask.conftest import client, session_client # pylint: disable=unused-import
from tests.app.conftest import other_test_user_auth, test_user_auth # pylint: disable=unused-import
from tests.app.flask.test_app import BlueprintClient
from tests.processing import test_data as test_processing
......
@@ -17,17 +17,75 @@
#
import pytest
import os
import yaml
from nomad import config
from .utils import assert_log
@pytest.fixture
def with_config():
old_value = config.fs.public
old_values = config.fs.public, config.fs.archive_version_suffix, config.auxfile_cutoff
yield config
config.fs.public = old_value
config.fs.public, config.fs.archive_version_suffix, config.auxfile_cutoff = old_values
def test_apply(with_config, caplog):
config._apply('fs_public', 'test_value')
assert config.fs.public == 'test_value'
config._apply('fs_archive_version_suffix', 'test_value')
assert config.fs.archive_version_suffix == 'test_value'
config._apply('auxfile_cutoff', '200')
assert config.auxfile_cutoff == 200
config._apply('does_not_exist', 'test_value')
assert_log(caplog, 'ERROR', 'does_not_exist does not exist')
config._apply('fs_does_not_exist', 'test_value')
assert_log(caplog, 'ERROR', 'fs_does_not_exist does not exist')
config._apply('max_entry_download', 'not_a_number')
assert_log(caplog, 'ERROR', 'cannot set')
config._apply('nounderscore', 'test_value')
assert_log(caplog, 'ERROR', 'nounderscore does not exist')
def test_env(with_config, monkeypatch):
monkeypatch.setattr('os.environ', dict(NOMAD_FS_PUBLIC='test_value'))
os.environ['NOMAD_FS_PUBLIC'] = 'test_value'
config._apply_env_variables()
assert config.fs.public == 'test_value'
def test_nomad_yaml(raw_files, with_config, monkeypatch, caplog):
config_data = {
'fs': {
'public': 'test_value',
'archive_version_suffix': 'test_value',
'does_not_exist': 'test_value'
},
'auxfile_cutoff': '200',
'does_not_exist': 'test_value',
'max_entry_download': 'not_a_number'
}
test_nomad_yaml = os.path.join(config.fs.tmp, 'nomad_test.yaml')
monkeypatch.setattr('os.environ', dict(NOMAD_CONFIG=test_nomad_yaml))
with open(test_nomad_yaml, 'w') as file:
yaml.dump(config_data, file)
config.load_config()
os.remove(test_nomad_yaml)
def test_apply(with_config):
config.apply('fs_public', 'test_value')
assert config.fs.public == 'test_value'
assert config.fs.archive_version_suffix == 'test_value'
assert config.auxfile_cutoff == 200
assert_log(caplog, 'ERROR', 'does_not_exist does not exist')
assert_log(caplog, 'ERROR', 'fs_does_not_exist does not exist')
assert_log(caplog, 'ERROR', 'cannot set')
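The rewritten config tests above exercise three override paths through the now-private _apply machinery: direct application of underscore-joined keys (fs_public maps into config.fs.public), NOMAD_*-prefixed environment variables, and a YAML file addressed by the NOMAD_CONFIG variable. Condensed into one runnable sketch (the YAML file name is a placeholder):

    import os
    import yaml

    from nomad import config

    config._apply('fs_public', 'test_value')
    assert config.fs.public == 'test_value'

    os.environ['NOMAD_FS_PUBLIC'] = 'test_value'
    config._apply_env_variables()

    with open('nomad_test.yaml', 'w') as f:
        yaml.dump({'fs': {'public': 'test_value'}, 'auxfile_cutoff': '200'}, f)
    os.environ['NOMAD_CONFIG'] = 'nomad_test.yaml'
    config.load_config()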
@@ -16,7 +16,7 @@
# limitations under the License.
#
from typing import Generator, Any, Dict, Tuple, Iterable
from typing import Generator, Any, Dict, Tuple, Iterable, List
import os
import os.path
import shutil
@@ -25,7 +25,7 @@ import itertools
import zipfile
import re
from nomad import config, datamodel
from nomad import config, datamodel, utils
from nomad.files import DirectoryObject, PathObject
from nomad.files import StagingUploadFiles, PublicUploadFiles, UploadFiles, Restricted, \
ArchiveBasedStagingUploadFiles
@@ -445,6 +445,25 @@ class TestPublicUploadFiles(UploadFilesContract):
with pytest.raises(KeyError):
StagingUploadFiles(upload_files.upload_id)
def test_archive_version_suffix(self, monkeypatch, test_upload_id):
monkeypatch.setattr('nomad.config.fs.archive_version_suffix', 'test_suffix')
_, entries, upload_files = create_staging_upload(test_upload_id, calc_specs='rp')
upload_files.pack(entries)
upload_files.delete()
public_upload_files = PublicUploadFiles(test_upload_id, is_authorized=lambda: False)
assert os.path.exists(public_upload_files.join_file('raw-public.plain.zip').os_path)
assert os.path.exists(public_upload_files.join_file('raw-restricted.plain.zip').os_path)
assert not os.path.exists(public_upload_files.join_file('raw-public-test_suffix.plain.zip').os_path)
assert not os.path.exists(public_upload_files.join_file('raw-restricted-test_suffix.plain.zip').os_path)
assert os.path.exists(public_upload_files.join_file('archive-public-test_suffix.msg.msg').os_path)
assert os.path.exists(public_upload_files.join_file('archive-restricted-test_suffix.msg.msg').os_path)
assert not os.path.exists(public_upload_files.join_file('archive-public-test.msg.msg').os_path)
assert not os.path.exists(public_upload_files.join_file('archive-restricted.msg.msg').os_path)
assert_upload_files(test_upload_id, entries, PublicUploadFiles)
def assert_upload_files(
upload_id: str, entries: Iterable[datamodel.EntryMetadata], cls,
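The new test above fixes the naming contract for config.fs.archive_version_suffix: raw zip files keep their unversioned names, while msgpack archive files carry the suffix. A hypothetical helper codifying that rule (not part of nomad.files), checked against the names asserted in the test:

    def archive_file_name(access: str, suffix: str = '') -> str:
        # the suffix applies to archives only; raw-*.plain.zip names stay unversioned
        name = 'archive-' + access
        if suffix:
            name += '-' + suffix
        return name + '.msg.msg'

    assert archive_file_name('public', 'test_suffix') == 'archive-public-test_suffix.msg.msg'
    assert archive_file_name('restricted') == 'archive-restricted.msg.msg'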
@@ -480,3 +499,88 @@ def assert_upload_files(
assert calc.with_embargo or isinstance(upload_files, StagingUploadFiles)
upload_files.close()
def create_test_upload_files(
upload_id: str,
archives: List[datamodel.EntryArchive],
published: bool = True,
template_files: str = example_file,
template_mainfile: str = example_file_mainfile) -> UploadFiles:
'''
Creates an upload_files object and the underlying files for test/mock purposes.
Arguments:
upload_id: The upload id for the upload. Will generate a random UUID if None.
archives: A list of :class:`datamodel.EntryArchive` metainfo objects. This will
be used to determine the mainfiles. Will create respective directories and
copy the template calculation to create raw files for each archive.
Will also be used to fill the archives in the created upload.
published: Creates a :class:`PublicUploadFiles` object with published files
instead of a :class:`StagingUploadFiles` object with staging files. Default
is published.
template_files: A zip file with example files in it. One directory will be used
as a template. It will be copied for each given archive.
template_mainfile: Path of the template mainfile within the given template_files.
'''
if upload_id is None:
    upload_id = utils.create_uuid()
if archives is None:
    archives = []
upload_files = ArchiveBasedStagingUploadFiles(
upload_id, upload_path=template_files, create=True)
upload_files.extract()
upload_raw_files = upload_files.join_dir('raw')
source = upload_raw_files.join_dir(os.path.dirname(template_mainfile)).os_path
for archive in archives:
# create a copy of the given template files for each archive
mainfile = archive.section_metadata.mainfile
assert mainfile is not None, 'Archives to create test upload must have a mainfile'
target = upload_raw_files.join_file(os.path.dirname(mainfile)).os_path
if os.path.exists(target):
for file_ in os.listdir(source):
shutil.copy(os.path.join(source, file_), target)
else:
shutil.copytree(source, target)
os.rename(
os.path.join(target, os.path.basename(template_mainfile)),
os.path.join(target, os.path.basename(mainfile)))
# create an archive "file" for each archive
calc_id = archive.section_metadata.calc_id
assert calc_id is not None, 'Archives to create test upload must have a calc id'
upload_files.write_archive(calc_id, archive.m_to_dict())
# remove the template
shutil.rmtree(source)
if published:
upload_files.pack([archive.section_metadata for archive in archives])
upload_files.delete()
return UploadFiles.get(upload_id)
return upload_files
def test_test_upload_files(raw_files_infra):
upload_id = utils.create_uuid()
archives: List[datamodel.EntryArchive] = []
for index in range(0, 3):
archive = datamodel.EntryArchive()
metadata = archive.m_create(datamodel.EntryMetadata)
metadata.calc_id = 'example_calc_id_%d' % index
metadata.mainfile = 'test/test/calc_%d/mainfile_%d.json' % (index, index)
archives.append(archive)
upload_files = create_test_upload_files(upload_id, archives)
try:
assert_upload_files(
upload_id,
[archive.section_metadata for archive in archives],
PublicUploadFiles)
finally:
if upload_files.exists():
upload_files.delete()
@@ -20,9 +20,12 @@ from typing import List, Iterable
from elasticsearch_dsl import Q
import pytest
from datetime import datetime
import json
from nomad import datamodel, search, processing, infrastructure, config
from nomad.search import entry_document, SearchRequest
from nomad import datamodel, processing, infrastructure, config
from nomad.metainfo import search_extension
from nomad.search import entry_document, SearchRequest, search, flat, update_by_query, refresh
from nomad.app.v1.models import WithQuery
def test_init_mapping(elastic):
@@ -43,7 +46,7 @@ def test_index_normalized_calc(elastic, normalized: datamodel.EntryArchive):
domain='dft', upload_id='test upload id', calc_id='test id')
entry_metadata.apply_domain_metadata(normalized)
search_entry = create_entry(entry_metadata)
entry = search.flat(search_entry.to_dict())
entry = flat(search_entry.to_dict())