Commit 68f6b652 authored by Lauri Himanen

Merge branch 'v0.8.0' into encyclopedia

parents 385ec99b b0e40791
Pipeline #74413 passed with stages in 36 minutes and 30 seconds
from nomad import config
from nomad.client import query_archive
from nomad.client import ArchiveQuery
from nomad.metainfo import units
# this will not be necessary, once this is the official NOMAD version
@@ -13,10 +13,11 @@ query = ArchiveQuery(
'atoms': ['O']
},
required={
'section_run': {
'section_single_configuration_calculation[0]': {
'section_run[0]': {
'section_single_configuration_calculation[-2]': {
'energy_total': '*'
}
},
'section_system[-2]': '*'
}
},
per_page=10,
@@ -25,5 +26,7 @@ query = ArchiveQuery(
print(query)
for result in query[0:10]:
energy = result.section_run[0].section_single_configuration_calculation[0].energy_total
print('Energy %s' % energy.to(units.hartree))
run = result.section_run[0]
energy = run.section_single_configuration_calculation[0].energy_total
formula = run.section_system[0].chemical_composition_reduced
print('%s: energy %s' % (formula, energy.to(units.hartree)))
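Pieced together, the updated client example from this file reads roughly as follows. This is a sketch: the `atoms` filter, the `required` layout, `per_page` and the result loop are taken from the visible hunks, while the keyword name `query` and anything hidden in the collapsed lines is assumed.

from nomad.client import ArchiveQuery
from nomad.metainfo import units

# sketch of the updated example; only values visible in the diff are used
query = ArchiveQuery(
    query={
        'atoms': ['O']
    },
    required={
        'section_run[0]': {
            'section_single_configuration_calculation[-2]': {
                'energy_total': '*'
            },
            'section_system[-2]': '*'
        }
    },
    per_page=10)

print(query)

for result in query[0:10]:
    run = result.section_run[0]
    energy = run.section_single_configuration_calculation[0].energy_total
    formula = run.section_system[0].chemical_composition_reduced
    print('%s: energy %s' % (formula, energy.to(units.hartree)))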
@@ -217,7 +217,7 @@ _archive_query_model = api.inherit('ArchiveSearch', search_model, {
'query': fields.Nested(query_model, description='The query used to find the requested entries.', skip_none=True),
'required': fields.Raw(description='A dictionary that defines what archive data to retrieve.'),
'query_schema': fields.Raw(description='Deprecated, use required instead.'),
'raise_errors': fields.Boolean(description='Return 401 on missing archives or 500 on other errors instead of skipping the entry.')
'raise_errors': fields.Boolean(description='Return 404 on missing archives or 500 on other errors instead of skipping the entry.')
})
@@ -259,7 +259,7 @@ class ArchiveQueryResource(Resource):
else:
required = data_in.get('query_schema', '*')
raise_error = data_in.get('raise_error', True)
raise_errors = data_in.get('raise_errors', False)
except Exception:
abort(400, message='bad parameter types')
@@ -328,15 +328,15 @@ class ArchiveQueryResource(Resource):
except ArchiveQueryError as e:
abort(400, str(e))
except KeyError:
if raise_error:
abort(401, 'Archive for entry %s does not exist' % calc_id)
if raise_errors:
abort(404, 'Archive for entry %s does not exist' % calc_id)
# We simply skip this entry
pass
except Restricted:
# TODO in reality this should not happen
pass
except Exception as e:
if raise_error:
if raise_errors:
raise e
common.logger(str(e), exc_info=e)
......
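With the flag renamed to `raise_errors` and the status code changed to 404, a client that wants missing archives reported instead of silently skipped could post something like the following. This is a sketch: the base URL and endpoint path are assumptions, only the `per_page`, `raise_errors` and `required` parameters come from the diff and the test below.

import requests

# hypothetical NOMAD installation; replace with a real API base URL
base_url = 'https://nomad-lab.eu/prod/rae/api'

response = requests.post(
    base_url + '/archive/query',  # assumed path of ArchiveQueryResource
    json={
        'per_page': 5,
        'raise_errors': True,  # 404 on missing archives, 500 on other errors
        'required': {'section_run': {'energy_total': '*'}}
    })

# 200 with results, 404 if an archive is missing while raise_errors is set
print(response.status_code)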
@@ -538,6 +538,13 @@ __query_archive_key_pattern = re.compile(r'^([\s\w\-]+)(\[([-?0-9]*)(:([-?0-9]*)
def query_archive(f_or_archive_reader: Union[str, ArchiveReader, BytesIO], query_dict: dict, **kwargs):
def _fix_index(index, length):
if index is None:
return index
if index < 0:
return max(-(length), index)
else:
return min(length, index)
def _to_son(data):
if isinstance(data, (ArchiveList, List)):
@@ -558,10 +565,11 @@ def query_archive(f_or_archive_reader: Union[str, ArchiveReader, BytesIO], query
# process array indices
match = __query_archive_key_pattern.match(key)
index: Tuple[int, int] = None
index: Union[Tuple[int, int], int] = None
if match:
key = match.group(1)
# check if we have indices
if match.group(2) is not None:
first_index, last_index = None, None
group = match.group(3)
@@ -573,7 +581,7 @@ def query_archive(f_or_archive_reader: Union[str, ArchiveReader, BytesIO], query
index = (0 if first_index is None else first_index, last_index)
else:
index = (first_index, first_index + 1) # one item
index = first_index # one item
else:
index = None
@@ -598,8 +606,16 @@ def query_archive(f_or_archive_reader: Union[str, ArchiveReader, BytesIO], query
if index is None:
pass
else:
length = len(archive_child)
if isinstance(index, tuple):
index = (_fix_index(index[0], length), _fix_index(index[1], length))
if index[0] == index[1]:
archive_child = [archive_child[index[0]]]
else:
archive_child = archive_child[index[0]: index[1]]
else:
archive_child = [archive_child[_fix_index(index, length)]]
if isinstance(archive_child, (ArchiveList, list)):
result[key] = [_load_data(val, item) for item in archive_child]
......
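The new `_fix_index` helper clamps out-of-range indices to the length of the list, and a single index such as `ss1[-2]` now yields a one-element list instead of being expanded into a slice beforehand. A self-contained sketch of these semantics (local names only, not the NOMAD implementation):

from typing import Optional, Tuple, Union

def fix_index(index: Optional[int], length: int) -> Optional[int]:
    # clamp an index into the valid range for a sequence of the given length
    if index is None:
        return index
    return max(-length, index) if index < 0 else min(length, index)

def select(values: list, index: Union[None, int, Tuple[int, int]]) -> list:
    # mirror the index handling above: a tuple is a slice, a bare int picks
    # a single item, None keeps everything
    length = len(values)
    if index is None:
        return values
    if isinstance(index, tuple):
        start, stop = fix_index(index[0], length), fix_index(index[1], length)
        return [values[start]] if start == stop else values[start:stop]
    return [values[fix_index(index, length)]]

values = ['a', 'b', 'c']
assert select(values, -2) == ['b']               # like ss1[-2]: one item
assert select(values, (-10, -1)) == ['a', 'b']   # start clamped from -10 to -3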
@@ -40,23 +40,23 @@ This script should yield a result like this:
.. code::
Number queries entries: 7667
Number queries entries: 7628
Number of entries loaded in the last api call: 10
Bytes loaded in the last api call: 3579
Bytes loaded from this query: 3579
Bytes loaded in the last api call: 118048
Bytes loaded from this query: 118048
Number of downloaded entries: 10
Number of made api calls: 1
Energy -178.6990610734937 hartree
Energy -6551.45699684026 hartree
Energy -6551.461104765451 hartree
Energy -548.9736595672932 hartree
Energy -548.9724185656775 hartree
Energy -1510.3938165430286 hartree
Energy -1510.3937761449583 hartree
Energy -11467.827149010665 hartree
Energy -16684.667362890417 hartree
Energy -1510.3908614326358 hartree
Cd2O2: energy -11467.827149010665 hartree
Sr2O2: energy -6551.45699684026 hartree
Sr2O2: energy -6551.461104765451 hartree
Be2O2: energy -178.6990610734937 hartree
Ca2O2: energy -1510.3938165430286 hartree
Ca2O2: energy -1510.3937761449583 hartree
Ba2O2: energy -16684.667362890417 hartree
Mg2O2: energy -548.9736595672932 hartree
Mg2O2: energy -548.9724185656775 hartree
Ca2O2: energy -1510.3908614326358 hartree
Let's discuss the different elements here. First, we have a set of imports. The NOMAD source
code comes with various sub-modules. The `client` module contains everything related
@@ -266,6 +266,24 @@ class ArchiveQuery(collections.abc.Sequence):
self.query['query'].update(query)
if required is not None:
self.query['query_schema'] = required
# We try to add all required properties to the query to ensure that only
# results with those properties are returned.
section_run_key = next((key for key in required if key.split('[')[0] == 'section_run'), None)
if section_run_key is not None:
# add all quantities in required to the query part
quantities = {'section_run'}
stack = []
section_run = required[section_run_key]
if isinstance(section_run, dict):
stack.append(section_run)
while len(stack) > 0:
required_dict = stack.pop()
for key, value in required_dict.items():
if isinstance(value, dict):
stack.append(value)
quantities.add(key.split('[')[0])
self.query['query'].setdefault('dft.quantities', []).extend(quantities)
self.query['query']['domain'] = 'dft'
self.password = password
self.username = username
......
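The block above walks the `required` dict and adds every section and quantity name it mentions to the `dft.quantities` search filter, so that the search only returns entries that actually contain the requested data. A small sketch of that traversal, run against the `required` dict from the documentation example above; it only shows which names end up in the filter:

# the same stack-based traversal as in the diff, applied to an example dict
required = {
    'section_run[0]': {
        'section_single_configuration_calculation[-2]': {'energy_total': '*'},
        'section_system[-2]': '*'
    }
}

quantities = {'section_run'}
stack = [value for key, value in required.items()
         if key.split('[')[0] == 'section_run' and isinstance(value, dict)]
while stack:
    required_dict = stack.pop()
    for key, value in required_dict.items():
        if isinstance(value, dict):
            stack.append(value)
        quantities.add(key.split('[')[0])

print(sorted(quantities))
# ['energy_total', 'section_run', 'section_single_configuration_calculation',
#  'section_system']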
@@ -721,10 +721,10 @@ class TestArchive(UploadFilesBasedTests):
calc_id='test_id', published=True, with_embargo=False)
entry_metadata.a_elastic.index(refresh=True)
rv = api.post(uri, content_type='application/json', data=json.dumps(dict(per_page=5, raise_error=True)))
assert rv.status_code == 401
rv = api.post(uri, content_type='application/json', data=json.dumps(dict(per_page=5, raise_errors=True)))
assert rv.status_code == 404
rv = api.post(uri, content_type='application/json', data=json.dumps(dict(per_page=5, raise_error=False)))
rv = api.post(uri, content_type='application/json', data=json.dumps(dict(per_page=5, raise_errors=False)))
assert rv.status_code == 200
......
@@ -236,9 +236,10 @@ test_query_example: Dict[Any, Any] = {
({'c1': {'s1': {'ss1[:2]': '*'}}}, {'c1': {'s1': {'ss1': test_query_example['c1']['s1']['ss1'][:2]}}}),
({'c1': {'s1': {'ss1[0:2]': '*'}}}, {'c1': {'s1': {'ss1': test_query_example['c1']['s1']['ss1'][0:2]}}}),
({'c1': {'s1': {'ss1[-2]': '*'}}}, {'c1': {'s1': {'ss1': test_query_example['c1']['s1']['ss1'][-2:-1]}}}),
({'c1': {'s1': {'ss1[-10]': '*'}}}, {'c1': {'s1': {'ss1': test_query_example['c1']['s1']['ss1'][-2:-1]}}}),
({'c1': {'s1': {'ss1[:-1]': '*'}}}, {'c1': {'s1': {'ss1': test_query_example['c1']['s1']['ss1'][:-1]}}}),
({'c1': {'s1': {'ss1[1:-1]': '*'}}}, {'c1': {'s1': {'ss1': test_query_example['c1']['s1']['ss1'][1:-1]}}}),
({'c2': {'s1': {'ss1[-3:-1]': '*'}}}, {'c2': {'s1': {'ss1': test_query_example['c2']['s1']['ss1'][-3:-1]}}}),
({'c2': {'s1': {'ss1[-3:-1]': '*'}}}, {'c2': {'s1': {'ss1': [test_query_example['c2']['s1']['ss1'][-1]]}}}),
({'c1': {'s2[0]': {'p1': '*'}}}, {'c1': {'s2': [{'p1': test_query_example['c1']['s2'][0]['p1']}]}}),
({'c1': {'s3': '*'}}, {'c1': {}}),
({'c1': {'s1[0]': '*'}}, ArchiveQueryError())
......