Commit f07d9d12 authored by Markus Scheidgen's avatar Markus Scheidgen
Browse files

Refactored config meta fields.

parent 1788548c
Pipeline #76401 passed with stages
in 29 minutes and 1 second
......@@ -98,7 +98,7 @@ query_model_fields = {
query_model_fields.update(**{
'owner': fields.String(description='The group the calculations belong to.', allow_null=True, skip_none=True),
'domain': fields.String(description='Specify the domain to search in: %s, default is ``%s``' % (
', '.join(['``%s``' % domain for domain in datamodel.domains]), config.default_domain)),
', '.join(['``%s``' % domain for domain in datamodel.domains]), config.meta.default_domain)),
'from_time': fields.Raw(description='The minimum entry time.', allow_null=True, skip_none=True),
'until_time': fields.Raw(description='The maximum entry time.', allow_null=True, skip_none=True)
})
......@@ -138,7 +138,7 @@ def add_search_parameters(request_parser):
'domain', type=str,
help='Specify the domain to search in: %s, default is ``%s``' % (
', '.join(['``%s``' % domain for domain in datamodel.domains]),
config.default_domain))
config.meta.default_domain))
request_parser.add_argument(
'owner', type=str,
help='Specify which calcs to return: ``visible``, ``public``, ``all``, ``user``, ``staging``, default is ``visible``')
......@@ -338,7 +338,7 @@ def query_api_clientlib(**kwargs):
kwargs = {
key: normalize_value(key, value) for key, value in kwargs.items()
if key in search.search_quantities and (key != 'domain' or value != config.default_domain)
if key in search.search_quantities and (key != 'domain' or value != config.meta.default_domain)
}
out = io.StringIO()
......
......@@ -131,8 +131,8 @@ class InfoResource(Resource):
for s in search.search_quantities.values()
if 'optimade' not in s.qualified_name
},
'version': config.version,
'release': config.release,
'version': config.meta.version,
'release': config.meta.release,
'git': {
'ref': gitinfo.ref,
'version': gitinfo.version,
......
......@@ -25,18 +25,20 @@ base_url = 'http://%s/%s/optimade' % (
config.services.api_base_path.strip('/'))
def url(endpoint: str = None, version='v0', **kwargs):
def url(endpoint: str = None, version='v0', prefix=None, **kwargs):
''' Returns the full optimade api url (for a given endpoint) including query parameters. '''
if endpoint is None:
if version is not None:
url = '%s/%s' % (base_url, version)
else:
url = base_url
if endpoint is not None:
url = '/' + endpoint
else:
if version is not None:
url = '%s/%s/%s' % (base_url, version, endpoint)
else:
url = '%s/%s' % (base_url, endpoint)
url = ''
if version is not None:
url = '/' + version + url
if prefix is not None:
url = '/' + prefix + url
url = base_url + url
if len(kwargs) > 0:
return '%s?%s' % (url, urllib.parse.urlencode(kwargs))
......
......@@ -17,14 +17,14 @@ from flask_restplus import Resource, abort
from flask import request
from elasticsearch_dsl import Q
from nomad import search, files, datamodel
from nomad import search, files, datamodel, config
from nomad.datamodel import OptimadeEntry
from .api import api, url, base_request_args
from .models import json_api_single_response_model, entry_listing_endpoint_parser, Meta, \
Links as LinksModel, CalculationDataObject, single_entry_endpoint_parser, base_endpoint_parser, \
json_api_info_response_model, json_api_list_response_model, ReferenceObject, StructureObject, \
ToplevelLinks, LinkObject, json_api_links_response_model, json_api_references_response_model, \
ToplevelLinks, json_api_references_response_model, \
json_api_structure_response_model, json_api_structures_response_model
from .filterparser import parse_filter, FilterException
......@@ -274,43 +274,31 @@ class References(Resource):
class Links(Resource):
@api.doc('links')
@api.response(400, 'Invalid requests, e.g. bad parameter.')
@api.response(422, 'Validation error')
@api.expect(entry_listing_endpoint_parser, validate=True)
@api.marshal_with(json_api_links_response_model, skip_none=True, code=200)
@api.expect(base_endpoint_parser, validate=True)
@api.marshal_with(json_api_list_response_model, skip_none=True, code=200)
def get(self):
'''Retrieve the links corresponding to the structures that match the given Optimade filter expression'''
try:
filter = request.args.get('filter', None)
page_limit = int(request.args.get('page_limit', 10))
page_number = int(request.args.get('page_number', 1))
sort = request.args.get('sort', 'chemical_formula_reduced'),
except Exception:
abort(400, message='bad parameter types') # TODO Specific json API error handling
''' Returns information relating to the API implementation. '''
base_request_args()
result = execute_search(
filter=filter, page_limit=page_limit, page_number=page_number, sort=sort)
available = result['pagination']['total']
results = to_calc_with_metadata(result['results'])
assert len(results) == len(result['results']), 'Mongodb and elasticsearch are not consistent'
result = [
{
"type": "parent",
"id": "index",
"attributes": {
"name": config.meta.name,
"description": config.meta.description,
"base_url": {
"href": url(version=None, prefix='index'),
},
"homepage": config.meta.homepage
}
}
]
return dict(
meta=Meta(
query=request.url,
returned=len(results),
available=available,
last_id=results[-1].calc_id if available > 0 else None),
links=ToplevelLinks(
'structures',
available=available,
page_number=page_number,
page_limit=page_limit,
sort=sort, filter=filter
),
# TODO Links are about links to other optimade databases, e.g. OQMD, MP, AFLOW.
# It is not about links within NOMAD, like LinkObject suggests.
data=[LinkObject(d, page_number=page_number, sort=sort, filter=filter) for d in results]
)
meta=Meta(query=request.url, returned=1),
data=result
), 200
@ns.route('/structures')
......
......@@ -15,6 +15,8 @@
from flask_restplus import Resource
from flask import request
from nomad import config
from .api import api, url, base_request_args
from .models import json_api_single_response_model, base_endpoint_parser, json_api_single_response_model, Meta, json_api_list_response_model
......@@ -57,7 +59,7 @@ class Info(Resource):
@ns.route('/links')
class Links(Resource):
@api.doc('index_info')
@api.doc('index_links')
@api.response(400, 'Invalid requests, e.g. bad parameter.')
@api.expect(base_endpoint_parser, validate=True)
@api.marshal_with(json_api_list_response_model, skip_none=True, code=200)
......@@ -66,31 +68,21 @@ class Links(Resource):
base_request_args()
result = [
{
"type": "parent",
"id": "index",
"attributes": {
"name": "NOMAD OPTiMaDe index",
"description": "Index for NOMAD's OPTiMaDe implemenations",
"base_url": url(version=None),
"homepage": "http://nomad-coe.eu"
}
},
{
"type": "child",
"id": "v0",
"attributes": {
"name": "NOMAD OPTiMaDe v0",
"description": "Novel Materials Discovery OPTiMaDe implementations v0",
"name": config.meta.name,
"description": config.meta.description,
"base_url": {
"href": url(),
},
"homepage": "http://nomad-coe.eu"
"homepage": config.meta.homepage
}
}
]
return dict(
meta=Meta(query=request.url, returned=2),
meta=Meta(query=request.url, returned=1),
data=result
), 200
......@@ -128,7 +128,7 @@ class Meta():
self.last_id = last_id
self.implementation = dict(
name='nomad@fairdi',
version=config.version,
version=config.meta.version,
source_url='https://gitlab.mpcdf.mpg.de/nomad-lab/nomad-FAIR',
maintainer=dict(email='markus.scheidgen@physik.hu-berlin.de'))
......
......@@ -39,7 +39,7 @@ def app(debug: bool, with_chaos: int):
def run_app(**kwargs):
config.service = 'app'
config.meta.service = 'app'
from nomad import infrastructure
from nomad.app.__main__ import run_dev_server
infrastructure.setup()
......@@ -47,7 +47,7 @@ def run_app(**kwargs):
def run_worker():
config.service = 'worker'
config.meta.service = 'worker'
from nomad import processing
processing.app.worker_main(['worker', '--loglevel=INFO', '-Q', 'celery,uploads,calcs'])
......
......@@ -60,7 +60,7 @@ def uploads(
if outdated:
uploads = proc.Calc._get_collection().distinct(
'upload_id',
{'metadata.nomad_version': {'$ne': config.version}})
{'metadata.nomad_version': {'$ne': config.meta.version}})
query |= mongoengine.Q(upload_id__in=uploads)
if code is not None and len(code) > 0:
......
......@@ -97,7 +97,7 @@ class POPO(dict):
@click.option('--config', help='the config file to use')
@click.pass_context
def cli(ctx, verbose: bool, debug: bool, config: str):
nomad_config.service = os.environ.get('NOMAD_SERVICE', 'cli')
nomad_config.meta.service = os.environ.get('NOMAD_SERVICE', 'cli')
if config is not None:
nomad_config.load_config(config_file=config)
......
......@@ -261,11 +261,17 @@ datacite = NomadConfig(
password='*'
)
version = '0.8.1'
commit = gitinfo.commit
release = 'devel'
default_domain = 'dft'
service = 'unknown nomad service'
meta = NomadConfig(
version='0.8.1',
commit=gitinfo.commit,
release='devel',
default_domain='dft',
service='unknown nomad service',
name='novel materials discovery (NOMAD)',
description='A FAIR data sharing platform for materials science data',
homepage='https://nomad-coe.eu'
)
auxfile_cutoff = 100
parser_matching_size = 9128
console_log_level = logging.WARNING
......
......@@ -150,8 +150,8 @@ class Calc(Proc):
entry_metadata.upload_id = self.upload_id
entry_metadata.calc_id = self.calc_id
entry_metadata.mainfile = self.mainfile
entry_metadata.nomad_version = config.version
entry_metadata.nomad_commit = config.commit
entry_metadata.nomad_version = config.meta.version
entry_metadata.nomad_commit = config.meta.commit
entry_metadata.uploader = self.upload.user_id
entry_metadata.upload_time = self.upload.upload_time
entry_metadata.upload_name = self.upload.name
......@@ -280,8 +280,8 @@ class Calc(Proc):
try:
self._entry_metadata = self.user_metadata()
self._entry_metadata.calc_hash = self.upload_files.calc_hash(self.mainfile)
self._entry_metadata.nomad_version = config.version
self._entry_metadata.nomad_commit = config.commit
self._entry_metadata.nomad_version = config.meta.version
self._entry_metadata.nomad_commit = config.meta.commit
self._entry_metadata.last_processing = datetime.utcnow()
self._entry_metadata.files = self.upload_files.calc_files(self.mainfile)
......@@ -1151,7 +1151,7 @@ class Upload(Proc):
''' All successfully processed and outdated calculations. '''
return Calc.objects(
upload_id=self.upload_id, tasks_status=SUCCESS,
metadata__nomad_version__ne=config.version)
metadata__nomad_version__ne=config.meta.version)
@property
def calcs(self):
......
......@@ -130,7 +130,7 @@ class SearchRequest:
There is also scrolling for quantities to go through all quantity values. There is no
paging for aggregations.
'''
def __init__(self, domain: str = config.default_domain, query=None):
def __init__(self, domain: str = config.meta.default_domain, query=None):
self._domain = domain
self._query = query
self._search = Search(index=config.elastic.index_name)
......
......@@ -144,8 +144,8 @@ class LogstashFormatter(logstash.formatter.LogstashFormatterBase):
'logger_name': record.name,
# Nomad specific
'nomad.service': config.service,
'nomad.release': config.release
'nomad.service': config.meta.service,
'nomad.release': config.meta.release
}
if record.name.startswith('nomad'):
......@@ -238,7 +238,7 @@ def add_logstash_handler(logger):
logstash_handler = LogstashHandler(
config.logstash.host,
config.logstash.tcp_port, version=1)
logstash_handler.formatter = LogstashFormatter(tags=['nomad', config.release])
logstash_handler.formatter = LogstashFormatter(tags=['nomad', config.meta.release])
logstash_handler.setLevel(config.logstash.level)
logger.addHandler(logstash_handler)
......
......@@ -233,7 +233,7 @@ def setup_kwargs():
return dict(
name='nomad-lab',
version=config.version,
version=config.meta.version,
description='The NOvel MAterials Discovery (NOMAD) Python package',
package_dir={'': './'},
packages=['nomad.%s' % pkg for pkg in find_packages('./nomad')] + ['nomad'],
......
......@@ -475,8 +475,8 @@ class TestUploads:
self.assert_published(api, admin_user_auth, upload['upload_id'], proc_infra, {})
def test_post_re_process(self, api, published, test_user_auth, monkeypatch):
monkeypatch.setattr('nomad.config.version', 're_process_test_version')
monkeypatch.setattr('nomad.config.commit', 're_process_test_commit')
monkeypatch.setattr('nomad.config.meta.version', 're_process_test_version')
monkeypatch.setattr('nomad.config.meta.commit', 're_process_test_commit')
upload_id = published.upload_id
rv = api.post(
......
......@@ -41,7 +41,7 @@ def test_index(index_api):
rv = index_api.get('/links')
assert rv.status_code == 200
data = json.loads(rv.data)
assert data['data'][1]['attributes']['base_url']['href'].endswith('v0')
assert data['data'][0]['attributes']['base_url']['href'].endswith('optimade/v0')
def test_get_entry(published: Upload):
......@@ -247,13 +247,7 @@ def test_links_endpoint(api, example_structures):
rv = api.get('/links')
assert rv.status_code == 200
data = json.loads(rv.data)
assert 'data' in data
assert len(data['data']) == 4
for d in data['data']:
for key in ['id', 'type', 'attributes']:
assert d.get(key) is not None
for key in ['name', 'description', 'base_url', 'homepage']:
assert key in d['attributes']
assert data['data'][0]['attributes']['base_url']['href'].endswith('optimade/index')
def test_structures_endpoint(api, example_structures):
......
......@@ -29,6 +29,7 @@ import elasticsearch.exceptions
from typing import List
import json
import logging
import warnings
from nomad import config, infrastructure, parsing, processing, app, utils
from nomad.utils import structlogging
......@@ -43,6 +44,8 @@ from tests.bravado_flask import FlaskTestHttpClient
test_log_level = logging.CRITICAL
example_files = [empty_file, example_file]
warnings.simplefilter("ignore")
structlogging.ConsoleFormatter.short_format = True
setattr(logging, 'Formatter', structlogging.ConsoleFormatter)
......@@ -626,9 +629,9 @@ def published_wo_user_metadata(non_empty_processed: processing.Upload) -> proces
@pytest.fixture
def reset_config():
''' Fixture that resets configuration. '''
service = config.service
service = config.meta.service
yield None
config.service = service
config.meta.service = service
utils.set_console_log_level(test_log_level)
......
......@@ -26,8 +26,8 @@ if __name__ == '__main__':
logger.error('parsing was not successful', status=backend.status)
backend.openNonOverlappingSection('section_entry_info')
backend.addValue('upload_id', config.services.unavailable_value)
backend.addValue('calc_id', config.services.unavailable_value)
backend.addValue('upload_id', config.meta.services.unavailable_value)
backend.addValue('calc_id', config.meta.services.unavailable_value)
backend.addValue('calc_hash', "no hash")
backend.addValue('mainfile', mainfile_path)
backend.addValue('parser_name', 'parsers/vasp')
......
......@@ -260,8 +260,8 @@ def test_re_processing(published: Upload, internal_example_user_metadata, monkey
raw_files, published.upload_files.join_file('raw-restricted.plain.zip').os_path)
# reprocess
monkeypatch.setattr('nomad.config.version', 're_process_test_version')
monkeypatch.setattr('nomad.config.commit', 're_process_test_commit')
monkeypatch.setattr('nomad.config.meta.version', 're_process_test_version')
monkeypatch.setattr('nomad.config.meta.commit', 're_process_test_commit')
published.reset()
published.re_process_upload()
try:
......
......@@ -233,7 +233,7 @@ class TestAdminUploads:
assert search.SearchRequest().search_parameters(comment='specific').execute()['total'] == 1
def test_re_process(self, published, monkeypatch):
monkeypatch.setattr('nomad.config.version', 'test_version')
monkeypatch.setattr('nomad.config.meta.version', 'test_version')
upload_id = published.upload_id
calc = Calc.objects(upload_id=upload_id).first()
assert calc.metadata['nomad_version'] != 'test_version'
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment