Commit c9a5056b authored by Markus Scheidgen

Merged latest changes.

parents 990dcb3f e4ce6335
window.nomadEnv = {
'keycloakBase': 'https://labdev-nomad.esc.rzg.mpg.de/fairdi/keycloak/auth',
'keycloakBase': 'https://labdev-nomad.esc.rzg.mpg.de/fairdi/keycloak/auth/',
'keycloakRealm': 'fairdi_nomad_test',
'keycloakClientId': 'nomad_gui_dev',
'appBase': 'http://localhost:8000/fairdi/nomad/latest',
@@ -198,8 +198,14 @@ class NavigationUnstyled extends React.Component {
}
componentDidMount() {
fetch(`${guiBase}/meta.json`)
.then((response) => response.json())
fetch(`${guiBase}/meta.json`, {
method: 'GET',
cache: 'no-cache',
headers: {
'Pragma': 'no-cache',
'Cache-Control': 'no-cache, no-store'
}
}).then((response) => response.json())
.then((meta) => {
if (meta.version !== packageJson.version) {
console.log('GUI API version mismatch')
@@ -4989,10 +4989,6 @@ keycloak-js@^6.0.0:
version "6.0.1"
resolved "https://registry.yarnpkg.com/keycloak-js/-/keycloak-js-6.0.1.tgz#329a5e77210dfc4a7d4acf96f95dd0132455bea3"
keycloak@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/keycloak/-/keycloak-1.2.0.tgz#2ff4cc57102842f2eecc2f4bb206306596d7b025"
keycode@^2.1.7, keycode@^2.1.9:
version "2.2.0"
resolved "https://registry.yarnpkg.com/keycode/-/keycode-2.2.0.tgz#3d0af56dc7b8b8e5cba8d0a97f107204eec22b04"
@@ -26,4 +26,4 @@ There is a separate documentation for the API endpoints from a client perspective
"""
from .api import blueprint
from . import info, auth, admin, upload, repo, archive, raw, mirror
from . import info, auth, admin, upload, repo, archive, raw, mirror, dataset
from typing import Dict, Any
from flask import request, g
from flask_restplus import Resource, fields, abort
import mongoengine as me
from nomad import utils
from nomad.metainfo import MSection, Quantity, Section
from nomad.app.utils import with_logger
from .api import api
from .auth import authenticate
from .common import pagination_model, pagination_request_parser
ns = api.namespace(
'datasets',
description='Datasets allow users to create sets of related data.')
class Dataset(MSection):
""" A Dataset is attached to one or many entries to form a set of data.
Args:
dataset_id: The unique identifier for this dataset as a string. It should be
a randomly generated UUID, similar to other nomad ids.
name: The human readable name of the dataset as a string. The dataset name must be
unique for the user.
user_id: The unique user_id of the owner and creator of this dataset. The owner
must not change after creation.
doi: The optional Digital Object Identifier (DOI) associated with this dataset.
Nomad can register DOIs that link back to the respective representation of
the dataset in the nomad UI. This quantity holds the string representation of
this DOI. There is only one per dataset.
"""
dataset_id = Quantity(type=str, a_me=dict(primary_key=True))
name = Quantity(type=str, a_me=dict(index=True))
user_id = Quantity(type=str, a_me=dict(index=True))
doi = Quantity(type=str, a_me=dict(index=True))
def generate_flask_restplus_model(section_def: Section):
def generate_field(quantity: Quantity):
field = None
if quantity.type == int:
field = fields.Integer
elif quantity.type == float:
field = fields.Float
elif quantity.type == str:
field = fields.String
elif quantity.type == bool:
field = fields.Boolean
else:
raise NotImplementedError
result = field(description=quantity.description)
if len(quantity.shape) == 0:
return result
elif len(quantity.shape) == 1:
return fields.List(result)
else:
raise NotImplementedError
return api.model(section_def.name, {
name: generate_field(quantity)
for name, quantity in section_def.all_quantities.items()
})
dataset_model = generate_flask_restplus_model(Dataset.m_def)
dataset_list_model = api.model('DatasetList', {
'pagination': fields.Nested(model=pagination_model),
'results': fields.List(fields.Nested(model=dataset_model, skip_none=True))
})
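For the Dataset section defined above, the generated model is roughly equivalent to the following hand-written flask-restplus model (an illustrative sketch, not generated output):
# what generate_flask_restplus_model(Dataset.m_def) roughly expands to
api.model('Dataset', {
    'dataset_id': fields.String(),
    'name': fields.String(),
    'user_id': fields.String(),
    'doi': fields.String()
})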
def generate_mongoengine(section_def: Section):
def generate_field(quantity: Quantity):
annotation = quantity.m_annotations.get('me', {})
annotation.pop('index', None)
field = None
if quantity.type == int:
field = me.IntField
elif quantity.type == float:
field = me.FloatField
elif quantity.type == str:
field = me.StringField
elif quantity.type == bool:
field = me.BooleanField
else:
raise NotImplementedError
result = field(default=quantity.default, **annotation)
if len(quantity.shape) == 0:
return result
elif len(quantity.shape) == 1:
return me.ListField(result)
else:
raise NotImplementedError
indexes = [
quantity.name
for quantity in section_def.all_quantities.values()
if quantity.m_annotations.get('me', {}).get('index', False)]
dct: Dict[str, Any] = dict()
if len(indexes) > 0:
dct.update(meta=dict(indexes=indexes))
dct.update(**{
name: generate_field(quantity)
for name, quantity in section_def.all_quantities.items()
})
return type(section_def.name, (me.Document,), dct)
DatasetME = generate_mongoengine(Dataset.m_def)
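Similarly, the generated mongoengine document is roughly equivalent to this hand-written class (illustrative sketch only; the actual class is created dynamically above, and the indexes come from the index=True annotations on the quantities):
class DatasetME(me.Document):
    meta = dict(indexes=['name', 'user_id', 'doi'])
    dataset_id = me.StringField(primary_key=True)
    name = me.StringField()
    user_id = me.StringField()
    doi = me.StringField()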
@ns.route('/')
class DatasetListResource(Resource):
@api.doc('list_datasets')
@api.marshal_with(dataset_list_model, skip_none=True, code=200, description='Datasets sent')
@api.expect(pagination_request_parser)
@authenticate(required=True)
def get(self):
""" Retrieve a list of all datasets of the authenticated user. """
try:
page = int(request.args.get('page', 1))
per_page = int(request.args.get('per_page', 10))
except Exception:
abort(400, message='bad parameter types')
result_query = DatasetME.objects(user_id=g.user.user_id)
return dict(
pagination=dict(total=result_query.count(), page=page, per_page=per_page),
results=result_query[(page - 1) * per_page: page * per_page]), 200
@api.doc('create_dataset')
@api.response(400, 'The provided data is malformed or a dataset with the name already exists')
@api.marshal_with(dataset_model, skip_none=True, code=200, description='Dataset sent')
@api.expect(dataset_model)
@authenticate(required=True)
def put(self):
""" Creates a new dataset. """
data = request.get_json()
if data is None:
data = {}
# unique name
name = data.get('name', None)
if name is None:
abort(400, 'Must provide a dataset name.')
if DatasetME.objects(user_id=g.user.user_id, name=name).count() > 0:
abort(400, 'A dataset with name %s already exists for the current user.' % name)
# only admin can set user or doi
if any(key in data for key in ['user_id', 'doi', 'dataset_id']):
if not g.user.is_admin():
abort(400, 'The dataset contains information you are not allowed to set.')
# no other keys
if any(key not in Dataset.m_def.all_quantities for key in data):
abort(400, 'The dataset contains unknown keys.')
if 'user_id' not in data:
data['user_id'] = g.user.user_id
dataset_id = data.pop('dataset_id', utils.create_uuid())
return DatasetME(dataset_id=dataset_id, **data).save(), 200
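For illustration, a client could exercise this endpoint as sketched below; the API base URL is taken from the development GUI config above, and the bearer-token handling is an assumption, not part of this change:
import requests

api_base = 'http://localhost:8000/fairdi/nomad/latest/api'
token = '<keycloak access token>'  # placeholder

# PUT /datasets/ creates a dataset owned by the authenticated user
response = requests.put(
    '%s/datasets/' % api_base,
    headers={'Authorization': 'Bearer %s' % token},
    json={'name': 'my first dataset'})
print(response.json()['dataset_id'])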
@ns.route('/<string:name>')
@api.doc(params=dict(name='The name of the requested dataset.'))
class DatasetResource(Resource):
@api.doc('get_dataset')
@api.response(404, 'The dataset does not exist')
@api.marshal_with(dataset_model, skip_none=True, code=200, description='Dataset sent')
@authenticate(required=True)
def get(self, name: str):
""" Retrieve a dataset by name. """
result = DatasetME.objects(user_id=g.user.user_id, name=name).first()
if result is None:
abort(404, 'Dataset with name %s does not exist for the current user' % name)
return result
@api.doc('assign_doi')
@api.response(404, 'The dataset does not exist')
@api.response(400, 'The dataset already has a DOI')
@api.marshal_with(dataset_model, skip_none=True, code=200, description='DOI assigned')
@authenticate(required=True)
@with_logger
def post(self, name: str, logger):
""" Assign a DOI to the dataset. """
result = DatasetME.objects(user_id=g.user.user_id, name=name).first()
if result is None:
abort(404, 'Dataset with name %s does not exist for the current user' % name)
logger.error('assign DOI is not implemented', user_id=g.user.user_id)
return result
@api.doc('delete_dataset')
@api.response(404, 'The dataset does not exist')
@api.response(400, 'The dataset has a DOI and cannot be deleted')
@api.marshal_with(dataset_model, skip_none=True, code=200, description='Dataset deleted')
@authenticate(required=True)
def delete(self, name: str):
""" Assign a DOI to the dataset. """
result = DatasetME.objects(user_id=g.user.user_id, name=name).first()
if result is None:
abort(404, 'Dataset with name %s does not exist for the current user' % name)
if result.doi is not None:
abort(400, 'Dataset with name %s has a DOI and cannot be deleted' % name)
result.delete()
return result
@@ -366,7 +366,7 @@ class RawFileQueryResource(Resource):
abort(400, message='bad parameter types')
search_request = search.SearchRequest()
add_query(search_request)
add_query(search_request, search_request_parser)
calcs = sorted([
(entry['upload_id'], entry['mainfile'])
@@ -102,7 +102,7 @@ def add_common_parameters(request_parser):
for quantity in search.quantities.values():
request_parser.add_argument(
quantity.name, help=quantity.description,
action='append' if quantity.multi else None)
action=quantity.argparse_action if quantity.multi else None)
repo_request_parser = pagination_request_parser.copy()
@@ -129,14 +129,16 @@ search_request_parser = api.parser()
add_common_parameters(search_request_parser)
def add_query(search_request: search.SearchRequest):
def add_query(search_request: search.SearchRequest, parser=repo_request_parser):
"""
Helper that adds query relevant request parameters to the given SearchRequest.
"""
args = {key: value for key, value in parser.parse_args().items() if value is not None}
# owner
try:
search_request.owner(
request.args.get('owner', 'all'),
args.get('owner', 'all'),
g.user.user_id if g.user is not None else None)
except ValueError as e:
abort(401, getattr(e, 'message', 'Invalid owner parameter'))
@@ -144,8 +146,8 @@ def add_query(search_request: search.SearchRequest):
abort(400, getattr(e, 'message', 'Invalid owner parameter'))
# time range
from_time_str = request.args.get('from_time', None)
until_time_str = request.args.get('until_time', None)
from_time_str = args.get('from_time', None)
until_time_str = args.get('until_time', None)
try:
from_time = rfc3339DateTime.parse(from_time_str) if from_time_str is not None else None
@@ -156,7 +158,7 @@ def add_query(search_request: search.SearchRequest):
# optimade
try:
optimade = request.args.get('optimade', None)
optimade = args.get('optimade', None)
if optimade is not None:
q = filterparser.parse_filter(optimade)
search_request.query(q)
@@ -165,8 +167,7 @@ def add_query(search_request: search.SearchRequest):
# search parameter
search_request.search_parameters(**{
key: request.args.getlist(key) if search.quantities[key] else request.args.get(key)
for key in request.args.keys()
key: value for key, value in args.items()
if key not in ['optimade'] and key in search.quantities})
@@ -210,7 +211,7 @@ class RepoCalcsResource(Resource):
"""
search_request = search.SearchRequest()
add_query(search_request)
add_query(search_request, repo_request_parser)
try:
scroll = bool(request.args.get('scroll', False))
@@ -325,7 +326,7 @@ class RepoQuantityResource(Resource):
"""
search_request = search.SearchRequest()
add_query(search_request)
add_query(search_request, repo_quantity_search_request_parser)
try:
after = request.args.get('after', None)
@@ -222,6 +222,7 @@ class UploadListResource(Resource):
pagination=dict(total=total, page=page, per_page=per_page),
results=results), 200
@api.doc(security=list(api.authorizations.keys())) # weird bug, this should not be necessary
@api.doc('upload')
@api.expect(upload_metadata_parser)
@api.response(400, 'Too many uploads')
@@ -119,6 +119,16 @@ server {{
rewrite ^{1}/gui/service-worker.js /nomad/service-worker.js break;
}}
location {1}/gui/meta.json {{
add_header Last-Modified $date_gmt;
add_header Cache-Control 'no-store, no-cache, must-revalidate, proxy-revalidate, max-age=0';
if_modified_since off;
expires off;
etag off;
root /app/;
rewrite ^{1}/gui/meta.json /nomad/meta.json break;
}}
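# The location above is meant to keep meta.json out of all caches so that the GUI
# version check (see the Navigation.js change above) always sees the deployed version:
# conditional requests are ignored (if_modified_since off, etag off), Last-Modified is
# always the current time, and responses should carry
# 'Cache-Control: no-store, no-cache, must-revalidate, proxy-revalidate, max-age=0'.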
location {1}/api {{
proxy_set_header Host $host;
proxy_pass_request_headers on;
@@ -187,7 +187,7 @@ client = NomadConfig(
url='http://localhost:8000/fairdi/nomad/latest/api'
)
version = '0.6.0'
version = '0.7.0'
commit = gitinfo.commit
release = 'devel'
domain = 'DFT'
@@ -260,6 +260,7 @@ class DomainQuantity:
elastic_field: An optional elasticsearch key. Default is the name of the quantity.
elastic_value: A callable that takes a :class:`CalcWithMetadata` as input and produces the
value for the elastic search index.
argparse_action: The argparse action to use for multi values, either 'append' or 'split'. Default is 'append'.
"""
def __init__(
@@ -268,7 +269,8 @@ class DomainQuantity:
zero_aggs: bool = True, metadata_field: str = None,
elastic_mapping: type = None,
elastic_search_type: str = 'term', elastic_field: str = None,
elastic_value: Callable[[Any], Any] = None):
elastic_value: Callable[[Any], Any] = None,
argparse_action: str = 'append'):
self._name: str = None
self.description = description
@@ -281,6 +283,7 @@ class DomainQuantity:
self.elastic_search_type = elastic_search_type
self.metadata_field = metadata_field
self.elastic_field = elastic_field
self.argparse_action = argparse_action
self.elastic_value = elastic_value
if self.elastic_value is None:
@@ -353,7 +356,9 @@ class Domain:
pid=DomainQuantity(description='Search for the pid.'),
raw_id=DomainQuantity(description='Search for the raw_id.'),
mainfile=DomainQuantity(description='Search for the mainfile.'),
external_id=DomainQuantity(description='External user provided id. Does not have to be unique necessarily.'),
external_id=DomainQuantity(
description='External user provided id. Does not necessarily have to be unique.',
multi=True, argparse_action='split', elastic_search_type='terms'),
dataset=DomainQuantity(
elastic_field='datasets.name', multi=True, elastic_search_type='match',
description='Search for a particular dataset by name.'),
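For context, the two argparse actions behave differently in the flask-restplus request parser; a small hedged illustration using the quantities above (not part of this change):
parser = api.parser()
# action='split': one comma separated value becomes a list,
#   ?external_id=id_1,id_2  ->  ['id_1', 'id_2']
parser.add_argument('external_id', action='split')
# action='append' (the default here): repeated parameters are collected,
#   ?dataset=a&dataset=b    ->  ['a', 'b']
parser.add_argument('dataset', action='append')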
@@ -280,7 +280,12 @@ class LegacyMetainfoEnvironment:
return env
def generate_metainfo_code(self, package: Package, directory: str):
def generate_metainfo_code(
self, package: Package, directory: str = None, package_name: str = None):
if directory is None:
directory = '.'
def format_description(description, indent=0, width=90):
paragraphs = [paragraph.strip() for paragraph in description.split('\n')]
@@ -315,8 +320,14 @@ class LegacyMetainfoEnvironment:
format_type=format_type,
format_unit=format_unit)
with open(os.path.join(directory, '%s.py' % package.name), 'wt') as f:
f.write(env.get_template('package.j2').render(pkg=package))
with open(os.path.join(
directory, '%s.py' % package_name
if package_name is not None else package.name), 'wt') as f:
code = env.get_template('package.j2').render(pkg=package)
code = '\n'.join([
line.rstrip() if line.strip() != '' else ''
for line in code.split('\n')])
f.write(code)
if __name__ == '__main__':
@@ -326,3 +337,4 @@ if __name__ == '__main__':
package_names=['%s.nomadmetainfo.json' % pkg for pkg in ['common', 'public', 'vasp']])
legacy_env = env.legacy_info_env()
env.generate_metainfo_code(env.env.all_packages['public.nomadmetainfo.json'], package_name='public')
@@ -1938,6 +1938,7 @@ class Environment(MSection):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.all_definitions_by_name: Dict[str, List[Definition]] = dict()
self.all_packages: Dict[str, Package] = dict()
def resolve_definitions( # type: ignore
self, name: str, cls: Type[MSectionBound] = Definition) -> List[MSectionBound]:
@@ -1961,6 +1962,7 @@ class Environment(MSection):
def on_add_sub_section(self, sub_section_def: SubSection, sub_section: MSection):
if sub_section_def == Environment.packages:
package = sub_section.m_as(Package)
self.all_packages[package.name] = package
for definition in package.m_all_contents():
if isinstance(definition, Definition):
definitions = self.all_definitions_by_name.setdefault(definition.name, [])
@@ -71,10 +71,11 @@ based on NOMAD-coe's *python-common* module.
:members:
"""
from typing import Callable, IO, Union
from typing import Callable, IO, Union, Dict
import magic
import gzip
import bz2
import lzma
import os.path
from nomad import files, config
@@ -87,7 +88,8 @@ from nomad.parsing.artificial import TemplateParser, GenerateRandomParser, Chaos
_compressions = {
b'\x1f\x8b\x08': ('gz', gzip.open),
b'\x42\x5a\x68': ('bz2', bz2.open)
b'\x42\x5a\x68': ('bz2', bz2.open),
b'\xfd\x37\x7a': ('xz', lzma.open)
}
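The new key is the xz magic number; a quick sanity check (illustrative only):
import lzma
# lzma.compress produces an xz container by default; its header starts with these bytes
assert lzma.compress(b'anything')[:3] == b'\xfd\x37\x7a'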
@@ -116,7 +118,7 @@ def match_parser(mainfile: str, upload_files: Union[str, files.StagingUploadFile
with open(mainfile_path, 'rb') as f:
compression, open_compressed = _compressions.get(f.read(3), (None, open))
with open_compressed(mainfile_path, 'rb') as cf:
with open_compressed(mainfile_path, 'rb') as cf: # type: ignore
buffer = cf.read(config.parser_matching_size)
mime_type = magic.from_buffer(buffer, mime=True)
@@ -147,14 +149,14 @@ parsers = [
LegacyParser(
name='parsers/vasp', code_name='VASP',
parser_class_name='vaspparser.VASPRunParserInterface',
mainfile_mime_re=r'(application/xml)|(text/.*)',
mainfile_mime_re=r'(application/.*)|(text/.*)',
mainfile_contents_re=(
r'^\s*<\?xml version="1\.0" encoding="ISO-8859-1"\?>\s*'
r'?\s*<modeling>'
r'?\s*<generator>'
r'?\s*<i name="program" type="string">\s*vasp\s*</i>'
r'?'),
supported_compressions=['gz', 'bz2']
supported_compressions=['gz', 'bz2', 'xz']
),
VaspOutcarParser(
name='parsers/vasp-outcar', code_name='VASP',
@@ -319,6 +319,13 @@ class SearchRequest:
value = quantity.elastic_value(value)
if quantity.elastic_search_type == 'terms':
if not isinstance(value, list):
value = [value]
self.q &= Q('terms', **{quantity.elastic_field: value})
return self
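# For the external_id quantity introduced above (elastic_search_type='terms' and the
# default elastic_field, i.e. the quantity name), this branch roughly yields
#   Q('terms', external_id=['id_1', 'id_2'])
# for a request like ?external_id=id_1,id_2 (illustrative sketch).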
if isinstance(value, list):
values = value
else:
......
apiVersion: v1
appVersion: "0.6.0"
appVersion: "0.7.0"
description: A Helm chart for Kubernetes that only runs nomad services and uses externally hosted databases.
name: nomad
version: 0.6.0
version: 0.7.0
@@ -39,6 +39,16 @@ data:
rewrite ^{{ .Values.proxy.external.path }}/gui/service-worker.js /nomad/service-worker.js break;
}
location {{ .Values.proxy.external.path }}/gui/meta.json {
add_header Last-Modified $date_gmt;
add_header Cache-Control 'no-store, no-cache, must-revalidate, proxy-revalidate, max-age=0';
if_modified_since off;
expires off;
etag off;
root /app/;
rewrite ^{{ .Values.proxy.external.path }}/gui/meta.json /nomad/meta.json break;
}
location {{ .Values.proxy.external.path }}/api/uploads {
client_max_body_size 35g;
proxy_request_buffering off;
@@ -12,7 +12,7 @@ reqs = [str(ir.req) for ir in install_reqs if 'sphinxcontrib.httpdomain' not in
setup(
name='nomad',
version='0.6.0',
version='0.7.0',
description='The nomad@FAIRDI infrastructure python package',
py_modules=['nomad'],
install_requires=reqs,
@@ -30,6 +30,7 @@ from nomad import search, parsing, files, config, utils, infrastructure
from nomad.files import UploadFiles, PublicUploadFiles
from nomad.processing import Upload, Calc, SUCCESS
from nomad.datamodel import UploadWithMetadata, CalcWithMetadata, User
from nomad.app.api.dataset import DatasetME
from tests.conftest import create_auth_headers, clear_elastic
from tests.test_files import example_file, example_file_mainfile, example_file_contents
@@ -599,19 +600,19 @@ class TestRepo():
calc_with_metadata.update(
calc_id='2', uploader=other_test_user.user_id, published=True,
with_embargo=False, pid=2, upload_time=today - datetime.timedelta(days=5),
external_id='external_id')
external_id='external_2')
calc_with_metadata.update(
atoms=['Fe'], comment='this is a specific word', formula='AAA', basis_set='zzz')
search.Entry.from_calc_with_metadata(calc_with_metadata).save(refresh=True)
calc_with_metadata.update(
calc_id='3', uploader=other_test_user.user_id, published=False,