Commit 067756d0 authored by Alvin Noe Ladines's avatar Alvin Noe Ladines
Browse files

Implemented additional optimade endpoints

parent c00ba593
Pipeline #74896 failed with stages
in 17 minutes and 29 seconds
......@@ -22,8 +22,10 @@ from nomad.datamodel import OptimadeEntry
from .api import api, url
from .models import json_api_single_response_model, entry_listing_endpoint_parser, Meta, \
Links, CalculationDataObject, single_entry_endpoint_parser, base_endpoint_parser, \
json_api_info_response_model, json_api_list_response_model
Links as LinksModel, CalculationDataObject, single_entry_endpoint_parser, base_endpoint_parser, \
json_api_info_response_model, json_api_list_response_model, ReferenceObject, StructureObject, \
ToplevelLinks, LinkObject, json_api_links_response_model, json_api_references_response_model, \
json_api_structure_response_model, json_api_structures_response_model
from .filterparser import parse_filter, FilterException
......@@ -114,7 +116,7 @@ class CalculationList(Resource):
returned=len(results),
available=available,
last_id=results[-1].calc_id if available > 0 else None),
links=Links(
links=LinksModel(
'calculations',
available=available,
page_number=page_number,
......@@ -211,3 +213,176 @@ class Info(Resource):
meta=Meta(query=request.url, returned=1),
data=result
), 200
def execute_search(**kwargs):
    '''
    Run a paginated search over the optimade index.

    Keyword Args:
        filter: optional Optimade filter expression string; aborts with 400 if unparsable
        page_number: 1-based page to return
        page_limit: number of entries per page
    Returns: the paginated elasticsearch result dict.
    '''
    filter_str = kwargs.get('filter')
    page_number = kwargs.get('page_number')
    page_limit = kwargs.get('page_limit')

    search_request = base_search_request().include('calc_id', 'upload_id')

    if filter_str is not None:
        try:
            search_request.query(parse_filter(filter_str))
        except FilterException as e:
            abort(400, message='Could not parse filter expression: %s' % str(e))

    # order_by='optimade.%s' % sort) # TODO map the Optimade property
    return search_request.execute_paginated(page=page_number, per_page=page_limit)
@ns.route('/references')
class References(Resource):
    @api.doc('references')
    @api.response(400, 'Invalid requests, e.g. bad parameter.')
    @api.response(422, 'Validation error')
    @api.expect(entry_listing_endpoint_parser, validate=True)
    @api.marshal_with(json_api_references_response_model, skip_none=True, code=200)
    def get(self):
        ''' Retrieve the references corresponding to the structures that match the given Optimade filter expression. '''
        try:
            filter = request.args.get('filter', None)
            page_limit = int(request.args.get('page_limit', 10))
            page_number = int(request.args.get('page_number', 1))
            # FIX: removed trailing comma that turned sort into a 1-tuple
            sort = request.args.get('sort', 'chemical_formula_reduced')
        except Exception:
            abort(400, message='bad parameter types')  # TODO Specific json API error handling

        result = execute_search(
            filter=filter, page_limit=page_limit, page_number=page_number, sort=sort)
        available = result['pagination']['total']
        results = to_calc_with_metadata(result['results'])
        assert len(results) == len(result['results']), 'Mongodb and elasticsearch are not consistent'

        return dict(
            meta=Meta(
                query=request.url,
                returned=len(results),
                available=available,
                last_id=results[-1].calc_id if available > 0 else None),
            links=ToplevelLinks(
                # FIX: pagination links must point back at this endpoint, not /structures
                'references',
                available=available,
                page_number=page_number,
                page_limit=page_limit,
                sort=sort, filter=filter),
            data=[ReferenceObject(d) for d in results]
        ), 200
@ns.route('/links')
class Links(Resource):
    @api.doc('links')
    @api.response(400, 'Invalid requests, e.g. bad parameter.')
    @api.response(422, 'Validation error')
    @api.expect(entry_listing_endpoint_parser, validate=True)
    @api.marshal_with(json_api_links_response_model, skip_none=True, code=200)
    def get(self):
        ''' Retrieve the links corresponding to the structures that match the given Optimade filter expression. '''
        try:
            filter = request.args.get('filter', None)
            page_limit = int(request.args.get('page_limit', 10))
            page_number = int(request.args.get('page_number', 1))
            # FIX: removed trailing comma that turned sort into a 1-tuple
            sort = request.args.get('sort', 'chemical_formula_reduced')
        except Exception:
            abort(400, message='bad parameter types')  # TODO Specific json API error handling

        result = execute_search(
            filter=filter, page_limit=page_limit, page_number=page_number, sort=sort)
        available = result['pagination']['total']
        results = to_calc_with_metadata(result['results'])
        assert len(results) == len(result['results']), 'Mongodb and elasticsearch are not consistent'

        return dict(
            meta=Meta(
                query=request.url,
                returned=len(results),
                available=available,
                last_id=results[-1].calc_id if available > 0 else None),
            links=ToplevelLinks(
                # FIX: pagination links must point back at this endpoint, not /structures
                'links',
                available=available,
                page_number=page_number,
                page_limit=page_limit,
                sort=sort, filter=filter
            ),
            data=[LinkObject(d, page_number=page_number, sort=sort, filter=filter) for d in results]
        ), 200  # FIX: explicit status code, consistent with the sibling endpoints
@ns.route('/structures')
class Structures(Resource):
    @api.doc('structures')
    @api.response(400, 'Invalid requests, e.g. bad parameter.')
    @api.response(422, 'Validation error')
    @api.expect(entry_listing_endpoint_parser, validate=True)
    @api.marshal_with(json_api_structures_response_model, skip_none=True, code=200)
    def get(self):
        ''' Retrieve the structures that match the given Optimade filter expression. '''
        request_fields = base_request_args()
        try:
            filter = request.args.get('filter', None)
            page_limit = int(request.args.get('page_limit', 10))
            page_number = int(request.args.get('page_number', 1))
            # FIX: removed trailing comma that turned sort into a 1-tuple
            sort = request.args.get('sort', 'chemical_formula_reduced')
        except Exception:
            abort(400, message='bad parameter types')  # TODO Specific json API error handling

        result = execute_search(
            filter=filter, page_limit=page_limit, page_number=page_number, sort=sort)
        available = result['pagination']['total']
        results = to_calc_with_metadata(result['results'])
        assert len(results) == len(result['results']), 'Mongodb and elasticsearch are not consistent'

        return dict(
            meta=Meta(
                query=request.url,
                returned=len(results),
                available=available,
                last_id=results[-1].calc_id if available > 0 else None),
            links=ToplevelLinks(
                'structures',
                available=available,
                page_number=page_number,
                page_limit=page_limit,
                sort=sort, filter=filter
            ),
            data=[StructureObject(d, request_fields) for d in results]
        ), 200
@ns.route('/structures/<string:id>')
class Structure(Resource):
    @api.doc('structure')
    @api.response(400, 'Invalid requests, e.g. bad parameter.')
    @api.response(404, 'Id does not exist.')
    @api.expect(single_entry_endpoint_parser, validate=True)
    @api.marshal_with(json_api_structure_response_model, skip_none=True, code=200)
    def get(self, id: str):
        ''' Retrieve a single calculation for the given id. '''
        request_fields = base_request_args()

        # look the entry up by its calc id; a single hit is enough
        search_request = base_search_request().search_parameters(calc_id=id)
        result = search_request.execute_paginated(page=1, per_page=1)

        available = result['pagination']['total']
        results = to_calc_with_metadata(result['results'])
        assert len(results) == len(result['results']), 'Mongodb and elasticsearch are not consistent'

        if available == 0:
            abort(404, 'The calculation with id %s does not exist' % id)

        return dict(
            meta=Meta(query=request.url, returned=1),
            data=StructureObject(results[0], request_fields=request_fields)
        ), 200
......@@ -133,6 +133,20 @@ class Meta():
maintainer=dict(email='markus.scheidgen@physik.hu-berlin.de'))
class ToplevelLinks:
    '''
    JSON API toplevel links object carrying the pagination links (self, first,
    last, prev, next) for a listing endpoint.

    Arguments:
        endpoint: the endpoint name the links should point at, e.g. 'structures'
        available: total number of available results
        page_number: the current (1-based) page
        page_limit: results per page
        **kwargs: additional query parameters (e.g. sort, filter); None values are dropped
    '''
    def __init__(self, endpoint: str, available: int, page_number: int, page_limit: int, **kwargs):
        # FIX: clamp to at least one page so prev/next/last never reference an
        # invalid page 0 when there are no results
        last_page = max(1, math.ceil(available / page_limit))

        rest = dict(page_limit=page_limit)
        rest.update(**{key: value for key, value in kwargs.items() if value is not None})

        self.self = url()
        self.related = None
        self.first = url(endpoint, page_number=1, **rest)
        self.last = url(endpoint, page_number=last_page, **rest)
        self.prev = url(endpoint, page_number=max((page_number - 1, 1)), **rest)
        self.next = url(endpoint, page_number=min((page_number + 1, last_page)), **rest)
json_api_links_model = api.model('Links', {
'base_url': fields.String(
description='The base URL of the implementation'),
......@@ -233,6 +247,29 @@ json_api_calculation_info_model = api.model('CalculationInfo', {
})
# Generic JSON API resource object (id/type/links/meta/attributes/relationships),
# used as the 'data' element of the structures, references, and links responses.
json_api_resource_model = api.model('Resource', {
    'id': fields.String(
        description='The id of the object.'),
    'type': fields.String(
        description='The type of the object.'),
    'links': fields.Raw(
        description='Links related to the resource.'
    ),
    'meta': fields.Raw(
        description='Meta information about the resource.'
    ),
    'attributes': fields.Raw(
        description='A dictionary, containing key-value pairs representing the entry details.'),
    'relationships': fields.Raw(
        description='A dictionary containing references to other entries.'
    )
})
class CalculationDataObject:
def __init__(self, calc: EntryMetadata, request_fields: Set[str] = None):
......@@ -252,6 +289,52 @@ class CalculationDataObject:
self.attributes = attrs
class StructureObject:
    '''
    Wraps a calculation entry as a JSON API resource object of type 'structure',
    exposing its optimade quantities (optionally restricted to request_fields)
    as the resource attributes.
    '''
    def __init__(self, calc: EntryMetadata, request_fields: Set[str] = None):
        optimade_dict = calc.dft.optimade.m_to_dict()
        if request_fields is None:
            attrs = dict(optimade_dict)
        else:
            attrs = {key: value for key, value in optimade_dict.items() if key in request_fields}

        # fall back to the upload time when the entry was never processed
        last_modified = calc.last_processing
        if last_modified is None:
            last_modified = calc.upload_time
        attrs['last_modified'] = last_modified

        self.type = 'structure'
        self.id = calc.calc_id
        self.links = None
        self.meta = None
        self.attributes = attrs
        self.relationships = None
class ReferenceObject:
    '''
    Wraps a calculation entry as a JSON API resource object of type 'calculation',
    exposing its id, last modification time, and authors as attributes.
    '''
    def __init__(self, calc: EntryMetadata):
        # prefer the processing time; fall back to the upload time
        last_modified = calc.last_processing
        if last_modified is None:
            last_modified = calc.upload_time

        self.type = 'calculation'
        self.id = calc.calc_id
        self.links = None
        self.meta = None
        self.attributes = dict(
            immutable_id=calc.calc_id,
            last_modified=last_modified,
            authors=calc.authors)
        self.relationships = None
class LinkObject:
    '''
    Wraps a calculation entry as a JSON API resource object of type 'child',
    carrying name, description, and URLs pointing at the structures endpoint.
    '''
    def __init__(self, calc: EntryMetadata, page_number: int, **kwargs):
        self.type = 'child'
        self.id = calc.calc_id
        self.links = None
        self.meta = None
        self.attributes = dict(
            name='Calculation %s' % calc.calc_id,
            description='Calculation entry in NOMAD database.',
            base_url=url('structures', page_number=page_number, **kwargs),
            homepage=url()
        )
        self.relationships = None
class Property:
@staticmethod
def from_nomad_to_optimade(name: str):
......@@ -292,6 +375,37 @@ json_api_info_response_model = api.inherit(
description=('The returned response object.'))
})
# Response model for the single-structure endpoint: one resource object as data.
json_api_structure_response_model = api.inherit(
    'Structure', json_api_response_model, {
        'data': fields.Nested(
            model=json_api_resource_model,
            required=True,
            description=('The returned structure object.'))
    })

# Response model for the structure listing endpoint: a list of resource objects.
json_api_structures_response_model = api.inherit(
    'Structures', json_api_response_model, {
        'data': fields.List(
            fields.Nested(json_api_resource_model),
            required=True,
            description=('The list of returned structure objects.'))
    })

# Response model for the references listing endpoint.
json_api_references_response_model = api.inherit(
    'References', json_api_response_model, {
        'data': fields.List(
            fields.Nested(json_api_resource_model),
            required=True,
            description=('The list of returned reference objects.'))
    })

# Response model for the links listing endpoint.
json_api_links_response_model = api.inherit(
    'Links', json_api_response_model, {
        'data': fields.List(
            fields.Nested(json_api_resource_model),
            required=True,
            description=('The list of returned link objects.'))
    })
base_endpoint_parser = api.parser()
base_endpoint_parser.add_argument(
......
......@@ -214,3 +214,55 @@ def test_calculation_info_endpoint(api):
data = json.loads(rv.data)
for key in ['description', 'properties', 'formats', 'output_fields_by_format']:
assert key in data['data']
def test_references_endpoint(api, example_structures):
    ''' The references endpoint returns one resource per example structure. '''
    rv = api.get('/references')
    assert rv.status_code == 200

    data = json.loads(rv.data)
    assert 'data' in data
    assert len(data['data']) == 4
    for entry in data['data']:
        assert entry.get('id') is not None
        assert entry.get('attributes') is not None
        assert 'last_modified' in entry['attributes']
def test_links_endpoint(api, example_structures):
    ''' The links endpoint returns one fully populated link object per example structure. '''
    rv = api.get('/links')
    assert rv.status_code == 200

    data = json.loads(rv.data)
    assert 'data' in data
    assert len(data['data']) == 4
    for entry in data['data']:
        for key in ('id', 'type', 'attributes'):
            assert entry.get(key) is not None
        attributes = entry['attributes']
        for key in ('name', 'description', 'base_url', 'homepage'):
            assert key in attributes
def test_structures_endpoint(api, example_structures):
    ''' The structures endpoint returns all mandatory optimade properties per entry. '''
    required_keys = (
        'last_modified', 'elements', 'nelements', 'elements_ratios', 'chemical_formula_descriptive',
        'chemical_formula_reduced', 'chemical_formula_anonymous', 'dimension_types',
        'cartesian_site_positions', 'nsites', 'species_at_sites', 'species', 'structure_features')

    rv = api.get('/structures')
    assert rv.status_code == 200

    data = json.loads(rv.data)
    assert len(data['data']) == 4
    for entry in data['data']:
        assert entry.get('id') is not None
        assert entry.get('attributes') is not None
        for key in required_keys:
            assert key in entry['attributes']
def test_structure_endpoint(api, example_structures):
    ''' A single structure can be retrieved by its calc id. '''
    rv = api.get('/structures/test_calc_id_1')
    assert rv.status_code == 200

    data = json.loads(rv.data)
    assert data.get('data') is not None

    attributes = data['data'].get('attributes')
    assert attributes is not None
    assert attributes.get('elements') == ['H', 'O']
    assert len(attributes.get('dimension_types')) == 3
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment