diff --git a/.gitignore b/.gitignore index f7f2cd8e71e2ad93e461e53a7e8b7186421a7f7d..aad8c4db6c9f442c4de7a1985acc34ca5e7e0f04 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ .DS_Store .pyenv/ +.env/ __pycache__ .mypy_cache *.pyc diff --git a/.python-version b/.python-version new file mode 100644 index 0000000000000000000000000000000000000000..cff2619cfb9409b0f202973810ccc91fc1c25fc5 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.6.9 diff --git a/dependencies/parsers/all_test_files.zip b/dependencies/parsers/all_test_files.zip new file mode 100644 index 0000000000000000000000000000000000000000..631cb2b0c37d4422b9eadefc5c24bd0002074153 Binary files /dev/null and b/dependencies/parsers/all_test_files.zip differ diff --git a/docs/setup.md b/docs/setup.md index 2b7bd3654db904fe5cfef4fb33a17a454fe9b709..d1e4363b9c6e6119fa9d31fc1d12541590e06ec7 100644 --- a/docs/setup.md +++ b/docs/setup.md @@ -53,8 +53,9 @@ brew install libmagic #### pyenv The nomad code currently targets python 3.6. If you host machine has 3.7 or later installed, you can use [pyenv](https://github.com/pyenv/pyenv) to use python 3.6 in parallel. -While in principle everything should be compatable with 3.7 and later there have been -issues with some dependencies and requirements not being compatible with 3.7 +With python 3.7 there is a slight issue with the `enum34` package, which breaks the compilation of the +`mdtraj` and `mdanalysis` packages. A possible workaround is to uninstall and then re-install +`enum34` once the other packages are installed. #### virtualenv We strongly recommend to use *virtualenv* to create a virtual environment. It will allow you diff --git a/nomad/api/app.py b/nomad/app/__init__.py similarity index 56% rename from nomad/api/app.py rename to nomad/app/__init__.py index 40936cc62fab571fa1c0290ca12c93f094ba28c8..fe59fe2083d88594f44cd0278f93662e3c5d1125 100644 --- a/nomad/api/app.py +++ b/nomad/app/__init__.py @@ -13,21 +13,38 @@ # limitations under the License. """ -All APIs are served by one Flask app (:py:mod:`nomad.api.app`) under different paths. -""" +This module comprises the nomad@FAIRDI APIs. + +The different APIs are upload, repository (raw data and search), and archive. + +There is a separate documentation for the API endpoints from a client perspective. + +.. autodata:: app -from flask import Flask, jsonify, url_for, abort -from flask_restplus import Api, fields +.. automodule:: nomad.api.app +.. automodule:: nomad.api.auth +.. automodule:: nomad.api.upload +.. automodule:: nomad.api.repo +.. automodule:: nomad.api.archive +.. automodule:: nomad.api.admin +""" +from flask import Flask, Blueprint, jsonify, url_for, abort, request +from flask_restplus import Api from flask_cors import CORS from werkzeug.exceptions import HTTPException from werkzeug.wsgi import DispatcherMiddleware import os.path -import inspect -from datetime import datetime -import pytz import random +from structlog import BoundLogger + +from nomad import config, utils as nomad_utils -from nomad import config, utils + +from .api import blueprint as api +from .optimade import blueprint as optimade +from .docs import blueprint as docs + +logger: BoundLogger = None +""" A logger pre-configured with information about the current request. """ base_path = config.services.api_base_path """ Provides the root path of the nomad APIs. 
""" @@ -47,10 +64,7 @@ if config.services.https: Api.specs_url = specs_url -app = Flask( - __name__, - static_url_path='/docs', - static_folder=os.path.abspath(os.path.join(os.path.dirname(__file__), '../../docs/.build/html'))) +app = Flask(__name__) """ The Flask app that serves all APIs. """ app.config.APPLICATION_ROOT = base_path # type: ignore @@ -59,6 +73,8 @@ app.config.RESTPLUS_MASK_SWAGGER = False # type: ignore app.config.SWAGGER_UI_OPERATION_ID = True # type: ignore app.config.SWAGGER_UI_REQUEST_DURATION = True # type: ignore +app.config['SECRET_KEY'] = config.services.api_secret + def api_base_path_response(env, resp): resp('200 OK', [('Content-Type', 'text/plain')]) @@ -73,15 +89,12 @@ app.wsgi_app = DispatcherMiddleware( # type: ignore CORS(app) -api = Api( - app, version='1.0', title='nomad@FAIRDI API', - description='Official API for nomad@FAIRDI services.', - validate=True) -""" Provides the flask restplust api instance """ +app.register_blueprint(api, url_prefix='/api') +app.register_blueprint(optimade, url_prefix='/optimade') +app.register_blueprint(docs, url_prefix='/docs') @app.errorhandler(Exception) -@api.errorhandler def handle(error: Exception): status_code = getattr(error, 'code', 500) if not isinstance(status_code, int): @@ -96,64 +109,38 @@ def handle(error: Exception): response = jsonify(data) response.status_code = status_code if status_code == 500: - utils.get_logger(__name__).error('internal server error', exc_info=error) + logger.error('internal server error', exc_info=error) + return response @app.route('/alive') def alive(): - """ Simply endpoint to utilize kubernetes liveness/readiness probing. """ + """ Simple endpoint to utilize kubernetes liveness/readiness probing. """ return "I am, alive!" -def with_logger(func): - """ - Decorator for endpoint implementations that provides a pre configured logger and - automatically logs errors on all 500 responses. 
- """ - signature = inspect.signature(func) - has_logger = 'logger' in signature.parameters - wrapper_signature = signature.replace(parameters=tuple( - param for param in signature.parameters.values() - if param.name != 'logger' - )) - - def wrapper(*args, **kwargs): - if has_logger: - args = inspect.getcallargs(wrapper, *args, **kwargs) - logger_args = { - k: v for k, v in args.items() - if k in ['upload_id', 'calc_id']} - logger = utils.get_logger(__name__, **logger_args) - args.update(logger=logger) - try: - return func(**args) - except HTTPException as e: - if getattr(e, 'code', None) == 500: - logger.error('Internal server error', exc_info=e) - raise e - except Exception as e: - logger.error('Internal server error', exc_info=e) - raise e - - wrapper.__signature__ = wrapper_signature - return wrapper - - -class RFC3339DateTime(fields.DateTime): - - def format(self, value): - if isinstance(value, datetime): - return super().format(value.replace(tzinfo=pytz.utc)) - else: - return str(value) - - -rfc3339DateTime = RFC3339DateTime() - - @app.before_request def before_request(): + # api logger + global logger + logger = nomad_utils.get_logger( + __name__, + blueprint=str(request.blueprint), + endpoint=request.endpoint, + method=request.method, + json=request.json, + args=request.args) + + # chaos monkey if config.services.api_chaos > 0: if random.randint(0, 100) <= config.services.api_chaos: abort(random.choice([400, 404, 500]), 'With best wishes from the chaos monkey.') + + +@app.before_first_request +def setup(): + from nomad import infrastructure + + if not app.config['TESTING']: + infrastructure.setup() diff --git a/nomad/api/__main__.py b/nomad/app/__main__.py similarity index 96% rename from nomad/api/__main__.py rename to nomad/app/__main__.py index 3598823e3f94854985ac3ab0657fa1ad961f3024..efafaf34c2433faccf1792fdcc1c17fcde0d878f 100644 --- a/nomad/api/__main__.py +++ b/nomad/app/__main__.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from nomad.api import app +from . import app def run_dev_server(*args, **kwargs): diff --git a/nomad/api/__init__.py b/nomad/app/api/__init__.py similarity index 50% rename from nomad/api/__init__.py rename to nomad/app/api/__init__.py index 328d5ba761f5810abe7faad996908127ab1c9dfc..5fda00a733acf92259651db8a124ed7788ff0d0e 100644 --- a/nomad/api/__init__.py +++ b/nomad/app/api/__init__.py @@ -12,30 +12,5 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -This module comprises the nomad@FAIRDI APIs. - -The different APIs are upload, repository (raw data and search), and archive. - -There is a separate documentation for the API endpoints from a client perspective. - -.. autodata:: app - -.. automodule:: nomad.api.app -.. automodule:: nomad.api.auth -.. automodule:: nomad.api.upload -.. automodule:: nomad.api.repo -.. automodule:: nomad.api.archive -.. automodule:: nomad.api.admin -""" -from .app import app +from .api import blueprint from . 
import info, auth, admin, upload, repo, archive, raw, mirror - - -@app.before_first_request -def setup(): - from nomad import infrastructure - from .app import api - - if not api.app.config['TESTING']: - infrastructure.setup() diff --git a/nomad/api/admin.py b/nomad/app/api/admin.py similarity index 99% rename from nomad/api/admin.py rename to nomad/app/api/admin.py index c2745391d1de50eeb5fd27725dcc033470734611..c1254f1b11552204b489b3df32c3a67c6d9bed29 100644 --- a/nomad/api/admin.py +++ b/nomad/app/api/admin.py @@ -17,7 +17,7 @@ from flask_restplus import abort, Resource, fields from nomad import infrastructure, config -from .app import api +from .api import api from .auth import admin_login_required diff --git a/nomad/app/api/api.py b/nomad/app/api/api.py new file mode 100644 index 0000000000000000000000000000000000000000..45d7c66bb956d74b647dde42ed24bc5db710be7e --- /dev/null +++ b/nomad/app/api/api.py @@ -0,0 +1,26 @@ +# Copyright 2018 Markus Scheidgen +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an"AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from flask import Blueprint +from flask_restplus import Api + + +blueprint = Blueprint('api', __name__) + +api = Api( + blueprint, + version='1.0', title='NOMAD API', + description='Official NOMAD API', + validate=True) +""" Provides the flask restplust api instance for the regular NOMAD api""" diff --git a/nomad/api/archive.py b/nomad/app/api/archive.py similarity index 99% rename from nomad/api/archive.py rename to nomad/app/api/archive.py index 3736458decb2a516ed9b54f8283ebd10884fc754..919f16717dff0a19bf94543501a86930dd10ef57 100644 --- a/nomad/api/archive.py +++ b/nomad/app/api/archive.py @@ -28,7 +28,7 @@ import nomad_meta_info from nomad.files import UploadFiles, Restricted -from .app import api +from .api import api from .auth import login_if_available, create_authorization_predicate, \ signature_token_argument, with_signature_token from .common import calc_route diff --git a/nomad/api/auth.py b/nomad/app/api/auth.py similarity index 98% rename from nomad/api/auth.py rename to nomad/app/api/auth.py index 94093427bc45be487c7a13f22ea1f041784adaa2..25da786e56d18645bebb7e57e96e263166e6c560 100644 --- a/nomad/api/auth.py +++ b/nomad/app/api/auth.py @@ -38,12 +38,12 @@ from flask_restplus import abort, Resource, fields from flask_httpauth import HTTPBasicAuth from datetime import datetime -from nomad import config, processing, files, utils, coe_repo +from nomad import processing, files, utils, coe_repo from nomad.coe_repo import User, LoginException -from .app import app, api, RFC3339DateTime +from nomad.app.utils import RFC3339DateTime +from .api import api -app.config['SECRET_KEY'] = config.services.api_secret auth = HTTPBasicAuth() diff --git a/nomad/api/common.py b/nomad/app/api/common.py similarity index 99% rename from nomad/api/common.py rename to nomad/app/api/common.py index 87fdbdb5f37727e4b903599d7c9f19c0b405e0d2..69e4031f8603d9cdfb6c27b97f163889ffb8db75 100644 --- a/nomad/api/common.py +++ b/nomad/app/api/common.py @@ -18,7 +18,7 @@ Common data, variables, decorators, models 
used throughout the API. from flask_restplus import fields -from .app import api +from .api import api pagination_model = api.model('Pagination', { diff --git a/nomad/api/info.py b/nomad/app/api/info.py similarity index 99% rename from nomad/api/info.py rename to nomad/app/api/info.py index ee806e6ad2259473a5e981ba308d5c6fd3b5b240..db2dc055905aff7e05adfa80847638fdc8e4fc46 100644 --- a/nomad/api/info.py +++ b/nomad/app/api/info.py @@ -20,7 +20,7 @@ from flask_restplus import Resource, fields from nomad import config, parsing, normalizing, datamodel, gitinfo -from .app import api +from .api import api ns = api.namespace('info', description='Access to nomad configuration details.') diff --git a/nomad/api/mirror.py b/nomad/app/api/mirror.py similarity index 99% rename from nomad/api/mirror.py rename to nomad/app/api/mirror.py index c62c959fce4279b4d87bedd81fcf12e8c1a3afbc..543204725fe66013e99abda0313ee24f23c9a31c 100644 --- a/nomad/api/mirror.py +++ b/nomad/app/api/mirror.py @@ -21,7 +21,7 @@ from flask_restplus import Resource, abort, fields from nomad import processing as proc -from .app import api +from .api import api from .auth import admin_login_required from .common import upload_route diff --git a/nomad/api/raw.py b/nomad/app/api/raw.py similarity index 99% rename from nomad/api/raw.py rename to nomad/app/api/raw.py index 3ac3869cd2b3784370a13cf9d559f1142519267e..68bce96f984d3fdcf0582011b3c2e4cdced056c1 100644 --- a/nomad/api/raw.py +++ b/nomad/app/api/raw.py @@ -29,7 +29,7 @@ from nomad import search, utils from nomad.files import UploadFiles, Restricted from nomad.processing import Calc -from .app import api +from .api import api from .auth import login_if_available, create_authorization_predicate, \ signature_token_argument, with_signature_token from .repo import search_request_parser, add_query diff --git a/nomad/api/repo.py b/nomad/app/api/repo.py similarity index 99% rename from nomad/api/repo.py rename to nomad/app/api/repo.py index 24323a672247324d76e1d0bd9d9df91a8e6af799..4706350311011945684078b4cdd5d4be9bc111fc 100644 --- a/nomad/api/repo.py +++ b/nomad/app/api/repo.py @@ -23,8 +23,9 @@ from flask import request, g from elasticsearch.exceptions import NotFoundError from nomad import search, utils, datamodel +from nomad.app.utils import rfc3339DateTime -from .app import api, rfc3339DateTime +from .api import api from .auth import login_if_available from .common import pagination_model, pagination_request_parser, calc_route diff --git a/nomad/api/upload.py b/nomad/app/api/upload.py similarity index 99% rename from nomad/api/upload.py rename to nomad/app/api/upload.py index fb5df116e4d03e3a7f01cb5b9f044879a8e37d6f..8fb6c10d7cc20881df0fa66234bbccc506731939 100644 --- a/nomad/api/upload.py +++ b/nomad/app/api/upload.py @@ -30,7 +30,8 @@ from nomad import config, utils, files from nomad.processing import Upload, FAILURE from nomad.processing import ProcessAlreadyRunning -from .app import api, with_logger, RFC3339DateTime +from nomad.app.utils import with_logger, RFC3339DateTime +from .api import api from .auth import login_really_required from .common import pagination_request_parser, pagination_model, upload_route diff --git a/nomad/app/docs.py b/nomad/app/docs.py new file mode 100644 index 0000000000000000000000000000000000000000..01115b3ef7101475b5a9e668c29ba46016be8e6b --- /dev/null +++ b/nomad/app/docs.py @@ -0,0 +1,20 @@ +# Copyright 2018 Markus Scheidgen +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from flask import Blueprint +import os.path + +docs_folder = os.path.abspath(os.path.join( + os.path.dirname(__file__), '../../docs/.build/html')) +blueprint = Blueprint('docs', __name__, static_url_path='/', static_folder=docs_folder) diff --git a/nomad/app/optimade/__init__.py b/nomad/app/optimade/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..fba1827a9266086d682b3942019443d4147d7eac --- /dev/null +++ b/nomad/app/optimade/__init__.py @@ -0,0 +1,26 @@ +# Copyright 2018 Markus Scheidgen +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from flask import Blueprint +from flask_restplus import Api + + +blueprint = Blueprint('optimade', __name__) + +api = Api( + blueprint, + version='1.0', title='NOMAD optimade API', + description='The NOMAD optimade API', + validate=True) +""" Provides the flask restplus api instance for the optimade api""" diff --git a/nomad/app/utils.py b/nomad/app/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..912fd0cd0620517731ce7f1cfbaf1325358e96a2 --- /dev/null +++ b/nomad/app/utils.py @@ -0,0 +1,67 @@ +# Copyright 2018 Markus Scheidgen +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from werkzeug.exceptions import HTTPException +from flask_restplus import fields +from datetime import datetime +import pytz +import inspect + +from nomad import utils + + +def with_logger(func): + """ + Decorator for endpoint implementations that provides a pre-configured logger and + automatically logs errors on all 500 responses. 
+ """ + signature = inspect.signature(func) + has_logger = 'logger' in signature.parameters + wrapper_signature = signature.replace(parameters=tuple( + param for param in signature.parameters.values() + if param.name != 'logger' + )) + + def wrapper(*args, **kwargs): + if has_logger: + args = inspect.getcallargs(wrapper, *args, **kwargs) + logger_args = { + k: v for k, v in args.items() + if k in ['upload_id', 'calc_id']} + logger = utils.get_logger(__name__, **logger_args) + args.update(logger=logger) + try: + return func(**args) + except HTTPException as e: + if getattr(e, 'code', None) == 500: + logger.error('Internal server error', exc_info=e) + raise e + except Exception as e: + logger.error('Internal server error', exc_info=e) + raise e + + wrapper.__signature__ = wrapper_signature + return wrapper + + +class RFC3339DateTime(fields.DateTime): + + def format(self, value): + if isinstance(value, datetime): + return super().format(value.replace(tzinfo=pytz.utc)) + else: + return str(value) + + +rfc3339DateTime = RFC3339DateTime() diff --git a/nomad/cli/admin/run.py b/nomad/cli/admin/run.py index 574e1e01dfc6aa3f58a0f1af9261c3e940a6b205..cb4abf10f9f2443ea23a436501a8f8ab70f40076 100644 --- a/nomad/cli/admin/run.py +++ b/nomad/cli/admin/run.py @@ -41,7 +41,7 @@ def api(debug: bool, with_chaos: int): def run_api(**kwargs): config.service = 'api' from nomad import infrastructure - from nomad.api.__main__ import run_dev_server + from nomad.app.__main__ import run_dev_server infrastructure.setup() run_dev_server(port=8000, **kwargs) diff --git a/nomad/cli/client/statistics.py b/nomad/cli/client/statistics.py index f0899951e6c513b390aba0d7ed6691ddc96ac05f..bb69e551d6381845801843ed3650021c956fe35d 100644 --- a/nomad/cli/client/statistics.py +++ b/nomad/cli/client/statistics.py @@ -22,22 +22,23 @@ import matplotlib.pyplot as plt import matplotlib.ticker as ticker import numpy as np import click +import json from .client import client -def codes(client, metrics=[]): - data = client.repo.search(per_page=1, owner='admin', metrics=metrics).response().result +def codes(client, minimum=1, **kwargs): + data = client.repo.search(per_page=1, **kwargs).response().result x_values = sorted([ code for code, values in data.quantities['code_name'].items() - if code != 'not processed' and values['code_runs'] > 0], key=lambda x: x.lower()) + if code != 'not processed' and values['code_runs'] >= minimum], key=lambda x: x.lower()) return data.quantities, x_values, 'code_name', 'code' -def dates(client, metrics=[]): - data = client.repo.search(per_page=1, owner='admin', metrics=metrics, date_histogram=True).response().result +def dates(client, minimum=1, **kwargs): + data = client.repo.search(per_page=1, date_histogram=True, **kwargs).response().result x_values = list([ x for x in data.quantities['date_histogram'].keys()]) @@ -128,9 +129,11 @@ def error_fig(client): plt.show() + return fig, plt + class Metric: - def __init__(self, metric, label=None, power=1, multiplier=1, format=None, cumulate=False): + def __init__(self, metric, label=None, power=None, multiplier=1, format=None, cumulate=False): if label is None: label = metric @@ -142,14 +145,18 @@ class Metric: self.format = format self.cumulate = cumulate - def draw_axis(self, axis, data, x_values, x_positions, width, color): + def draw_axis(self, axis, data, x_values, x_positions, width, color, only=False): + label_color = 'black' if only else color value_map = { x: values[self.metric] for x, values in data[self.agg].items() if x in x_values} - 
axis.set_yscale('power', exponent=self.power) - axis.set_ylabel(self.label, color=color) + if self.power is not None: + axis.set_yscale('power', exponent=self.power) + else: + axis.set_yscale('log') + axis.set_ylabel(self.label, color=label_color) if self.format is not None: axis.yaxis.set_major_formatter(ticker.StrMethodFormatter(self.format)) @@ -157,29 +164,39 @@ class Metric: y_values = [value_map[x] * self.multiplier for x in x_values] if self.cumulate: y_values = np.array(y_values).cumsum() - axis.bar(x_positions, y_values, width, label=self.label, color=color) - axis.tick_params(axis='y', labelcolor=color) + axis.bar(x_positions, y_values, width, label=self.label, color=color, align='edge') + axis.tick_params(axis='y', labelcolor=label_color) + + for x, v in zip(x_positions, y_values): + axis.text(x + .1, v, ' {:,}'.format(int(v)), color=color, fontweight='bold', rotation=90) + # TODO remove + if color.endswith('red'): + import matplotlib.lines as mlines + line = mlines.Line2D([min(x_positions), max(x_positions)], [80, 80], color=color) + axis.add_line(line) -def bar_plot(client, retrieve, metric1, metric2=None, title=None): + +def bar_plot(client, retrieve, metric1, metric2=None, title=None, **kwargs): metrics = [] if metric1.metric == 'code_runs' else [metric1.metric] if metric2 is not None: metrics += [] if metric2.metric == 'code_runs' else [metric2.metric] - data, x_values, agg, agg_label = retrieve(client, metrics) + data, x_values, agg, agg_label = retrieve(client, metrics=metrics, **kwargs) metric1.agg = agg if metric2 is not None: metric2.agg = agg - fig, ax1 = plt.subplots(figsize=(15, 6), dpi=72) + fig, ax1 = plt.subplots(figsize=(8, 6), dpi=72) x = np.arange(len(x_values)) - width = 0.7 / 2 + width = 0.8 / 2 if metric2 is None: - width = 0.7 + width = 0.8 plt.sca(ax1) plt.xticks(rotation=90) ax1.set_xticks(x) - ax1.set_xticklabels(x_values) + ax1.set_xticklabels([value if value != 'Quantum Espresso' else 'Q. 
Espresso' for value in x_values]) + ax1.margins(x=0.01) if title is None: title = 'Number of %s' % metric1.label if metric2 is not None: @@ -187,14 +204,15 @@ def bar_plot(client, retrieve, metric1, metric2=None, title=None): title += ' per %s' % agg_label ax1.set_title(title) - metric1.draw_axis(ax1, data, x_values, x - (width / 2 if metric2 is not None else 0), width, 'tab:red') + metric1.draw_axis(ax1, data, x_values, x - (width / 2), width, 'tab:blue', only=metric2 is None) if metric2: ax2 = ax1.twinx() # instantiate a second axes that shares the same x-axis - metric2.draw_axis(ax2, data, x_values, x + width / 2, width, 'tab:blue') + metric2.draw_axis(ax2, data, x_values, x + width / 2, width, 'tab:red') fig.tight_layout() - plt.show() + + return fig, plt @client.command(help='Generate various matplotlib charts') @@ -204,7 +222,11 @@ def bar_plot(client, retrieve, metric1, metric2=None, title=None): @click.option('--cumulate', is_flag=True, help='Cumulate over x-axis.') @click.option('--title', type=str, help='Override chart title with given value.') @click.option('--total', is_flag=True, help='Provide total sums of key metrics.') -def statistics(errors, title, x_axis, y_axis, cumulate, total): +@click.option('--save', type=str, help='Save to given file instead of showing the plot.') +@click.option('--power', type=float, help='Use power scale instead of log with the given inverse power.') +@click.option('--open-access', is_flag=True, help='Only consider Open-Access data.') +@click.option('--minimum', type=int, default=1, help='Only consider codes with at least the given amount of entries.') +def statistics(errors, title, x_axis, y_axis, cumulate, total, save, power, open_access, minimum): from .client import create_client client = create_client() @@ -244,12 +266,24 @@ def statistics(errors, title, x_axis, y_axis, cumulate, total): mscale.register_scale(PowerScale) + kwargs = {} + if cumulate: + kwargs.update( + power=1, + multiplier=1e-6, + format='{x:,.1f}M') + elif power is not None: + kwargs.update( + power=1 / power, + multiplier=1e-6, + format='{x:,.1f}M') + metrics = { 'entries': Metric( 'code_runs', label='entries (code runs)', cumulate=cumulate, - power=0.25 if not cumulate else 1, multiplier=1e-6, format='{x:,.1f}M'), + **kwargs), 'users': Metric( 'users', cumulate=cumulate, @@ -258,16 +292,18 @@ def statistics(errors, title, x_axis, y_axis, cumulate, total): 'total_energies', label='total energy calculations', cumulate=cumulate, - power=0.25 if not cumulate else 1, multiplier=1e-6, format='{x:,.1f}M'), + **kwargs), 'calculations': Metric( 'calculations', - label='single configuration calculations', + label='calculations (e.g. 
total energy)', cumulate=cumulate, - power=0.25 if not cumulate else 1, multiplier=1e-6, format='{x:,.1f}M') + **kwargs) } if errors: - error_fig(client) + fig, plt = error_fig(client) + + owner = 'all' if open_access else 'admin' if x_axis is not None: assert 1 <= len(y_axis) <= 2, 'Need 1 or 2 y axis' @@ -281,8 +317,14 @@ def statistics(errors, title, x_axis, y_axis, cumulate, total): y_axis = [metrics[y] for y in y_axis] - bar_plot(client, x_axis, *y_axis, title=title) + fig, plt = bar_plot(client, x_axis, *y_axis, title=title, owner=owner, minimum=minimum, code_name="VASP") + + if errors or x_axis is not None: + if save is not None: + fig.savefig(save, bbox_inches='tight') + else: + plt.show() if total: - data = client.repo.search(per_page=1, owner='admin', metrics=['total_energies', 'calculations', 'users', 'datasets']).response().result - print(data.quantities['total']) + data = client.repo.search(per_page=1, owner=owner, metrics=['total_energies', 'calculations', 'users', 'datasets']).response().result + print(json.dumps(data.quantities['total'], indent=4)) diff --git a/nomad/config.py b/nomad/config.py index 004a8f1fc44504418bd6b18a2a76155da3ce96d3..a72cb0264cb6cd3a829b990577ecb077d1ae54f3 100644 --- a/nomad/config.py +++ b/nomad/config.py @@ -140,7 +140,7 @@ logstash = NomadConfig( services = NomadConfig( api_host='localhost', api_port=8000, - api_base_path='/fairdi/nomad/latest/api', + api_base_path='/fairdi/nomad/latest', api_secret='defaultApiSecret', api_chaos=0, admin_password='password', diff --git a/nomad/utils.py b/nomad/utils.py index e6a0a353c2f818b8d6989922d7fb55ac22c05143..0e816c6d38587cc606dcb8b4a69744ab80f41139 100644 --- a/nomad/utils.py +++ b/nomad/utils.py @@ -205,7 +205,6 @@ class ConsoleFormatter(LogstashFormatter): for key in keys: out.write('\n - %s: %s' % (key, str(message_dict.get(key, None)))) - return out.getvalue() diff --git a/ops/scripts/misc.http b/ops/scripts/misc.http index 25e6c5c23d80cb5ca7f21d0906591b9832b42c47..024fec67d38527573867a40934cbd3d5c9e67da7 100644 --- a/ops/scripts/misc.http +++ b/ops/scripts/misc.http @@ -75,18 +75,40 @@ Content-Type: application/json "size": 0, "query": { "bool": { - "must": { - "match": { - "code_name": "currupted mainfile" - } + "must": [ + { "match": { "code_name": "Phonopy" } }, + { "match": { "published": false}} + ] + } + }, + "aggs": { + "upload_id": { + "sum": { + "field": "n_calculations" } } + } +} + +### +# Search migration unpublished calcs + +GET http://localhost:19200/fairdi_nomad_prod/_search HTTP/1.1 +Content-Type: application/json + +{ + "size": 1, + "query": { + "bool": { + "must": [ + { "match": { "calc_hash": "CNyiqVrxIJbRhSgn4ADUy_wi_dBx" } } + ] + } }, "aggs": { "upload_id": { "terms": { - "field": "upload_id", - "size": 300 + "field": "uploader.user_id" } } } @@ -114,4 +136,40 @@ Content-Type: application/json "number_of_replicas" : 2, "refresh_interval" : null } -} \ No newline at end of file +} + +### +# Archive ES +GET http://localhost:19202/archive2018-09-24/_search HTTP/1.1 +content-type: application/json + +{ + "size": 1, + "query": { + }, + "aggs": { + "codes": { + "terms": { + "field": "section_run.program_name" + } + } + } +} + +### +# Archive ES +GET http://localhost:19202/nomadarchiverepo/_search HTTP/1.1 +content-type: application/json + +{ + "size": 1, + "query": { + }, + "aggs": { + "codes": { + "terms": { + "field": "program_name" + } + } + } +} diff --git a/tests/app/resource.py b/tests/app/resource.py new file mode 100644 index 
0000000000000000000000000000000000000000..877031f9df65f1d5c7e5bf12b4549f8d7df9a216 --- /dev/null +++ b/tests/app/resource.py @@ -0,0 +1,36 @@ +# Copyright 2018 Markus Scheidgen +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an"AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +API endpoints that cause various scenerios to test general API aspects like logging, +error handling, etc. +""" + +from flask_restplus import Resource + +from nomad.app.api.api import api + + +ns = api.namespace('test', description='Only used for tests.') + + +@ns.route('/ise') +class InternalServerErrorResource(Resource): + @api.doc('get_error') + def get(self): + raise Exception('test exception') + + @api.doc('post_error') + def post(self): + raise Exception('test exception') diff --git a/tests/test_api.py b/tests/app/test_api.py similarity index 69% rename from tests/test_api.py rename to tests/app/test_api.py index 53ce3b8db7126d7a70e40f75fe5f83a52bb55575..65bf984c49c58b18ac7a638bfc62888ae670249d 100644 --- a/tests/test_api.py +++ b/tests/app/test_api.py @@ -25,7 +25,7 @@ import datetime import os.path from urllib.parse import urlencode -from nomad.api.app import rfc3339DateTime +from nomad.app.utils import rfc3339DateTime from nomad import coe_repo, search, parsing, files, config, utils from nomad.files import UploadFiles, PublicUploadFiles from nomad.processing import Upload, Calc, SUCCESS @@ -37,18 +37,19 @@ from tests.test_files import create_staging_upload, create_public_upload, assert from tests.test_coe_repo import assert_coe_upload from tests.test_search import assert_search_upload +from tests.app.test_app import BlueprintClient logger = utils.get_logger(__name__) -def test_alive(client): - rv = client.get('/alive') - assert rv.status_code == 200 +@pytest.fixture(scope='function') +def api(client): + return BlueprintClient(client, '/api') @pytest.fixture(scope='function') -def test_user_signature_token(client, test_user_auth): - rv = client.get('/auth/token', headers=test_user_auth) +def test_user_signature_token(api, test_user_auth): + rv = api.get('/auth/token', headers=test_user_auth) assert rv.status_code == 200 return json.loads(rv.data)['token'] @@ -62,8 +63,8 @@ def get_upload_with_metadata(upload: dict) -> UploadWithMetadata: class TestInfo: - def test_info(self, client): - rv = client.get('/info/') + def test_info(self, api): + rv = api.get('/info/') data = json.loads(rv.data) assert 'codes' in data assert 'parsers' in data @@ -73,67 +74,67 @@ class TestInfo: class TestAdmin: @pytest.mark.timeout(config.tests.default_timeout) - def test_reset(self, client, admin_user_auth, expandable_postgres, monkeypatch): + def test_reset(self, api, admin_user_auth, expandable_postgres, monkeypatch): monkeypatch.setattr('nomad.config.services.disable_reset', False) - rv = client.post('/admin/reset', headers=admin_user_auth) + rv = api.post('/admin/reset', headers=admin_user_auth) assert rv.status_code == 200 @pytest.mark.timeout(config.tests.default_timeout) - def test_remove(self, client, admin_user_auth, expandable_postgres, monkeypatch): + def 
test_remove(self, api, admin_user_auth, expandable_postgres, monkeypatch): monkeypatch.setattr('nomad.config.services.disable_reset', False) - rv = client.post('/admin/remove', headers=admin_user_auth) + rv = api.post('/admin/remove', headers=admin_user_auth) assert rv.status_code == 200 - def test_doesnotexist(self, client, admin_user_auth): - rv = client.post('/admin/doesnotexist', headers=admin_user_auth) + def test_doesnotexist(self, api, admin_user_auth): + rv = api.post('/admin/doesnotexist', headers=admin_user_auth) assert rv.status_code == 404 - def test_only_admin(self, client, test_user_auth): - rv = client.post('/admin/reset', headers=test_user_auth) + def test_only_admin(self, api, test_user_auth): + rv = api.post('/admin/reset', headers=test_user_auth) assert rv.status_code == 401 - def test_disabled(self, client, admin_user_auth, expandable_postgres, monkeypatch): + def test_disabled(self, api, admin_user_auth, expandable_postgres, monkeypatch): monkeypatch.setattr('nomad.config.services.disable_reset', True) - rv = client.post('/admin/reset', headers=admin_user_auth) + rv = api.post('/admin/reset', headers=admin_user_auth) assert rv.status_code == 400 class TestAuth: - def test_xtoken_auth(self, client, test_user: coe_repo.User, no_warn): - rv = client.get('/uploads/', headers={ + def test_xtoken_auth(self, api, test_user: coe_repo.User, no_warn): + rv = api.get('/uploads/', headers={ 'X-Token': test_user.first_name.lower() # the test users have their firstname as tokens for convinience }) assert rv.status_code == 200 - def test_xtoken_auth_denied(self, client, no_warn, postgres): - rv = client.get('/uploads/', headers={ + def test_xtoken_auth_denied(self, api, no_warn, postgres): + rv = api.get('/uploads/', headers={ 'X-Token': 'invalid' }) assert rv.status_code == 401 - def test_basic_auth(self, client, test_user_auth, no_warn): - rv = client.get('/uploads/', headers=test_user_auth) + def test_basic_auth(self, api, test_user_auth, no_warn): + rv = api.get('/uploads/', headers=test_user_auth) assert rv.status_code == 200 - def test_basic_auth_denied(self, client, no_warn): + def test_basic_auth_denied(self, api, no_warn): basic_auth_base64 = base64.b64encode('invalid'.encode('utf-8')).decode('utf-8') - rv = client.get('/uploads/', headers={ + rv = api.get('/uploads/', headers={ 'Authorization': 'Basic %s' % basic_auth_base64 }) assert rv.status_code == 401 - def test_get_user(self, client, test_user_auth, test_user: coe_repo.User, no_warn): - rv = client.get('/auth/user', headers=test_user_auth) + def test_get_user(self, api, test_user_auth, test_user: coe_repo.User, no_warn): + rv = api.get('/auth/user', headers=test_user_auth) assert rv.status_code == 200 - self.assert_user(client, json.loads(rv.data)) + self.assert_user(api, json.loads(rv.data)) - def assert_user(self, client, user): + def assert_user(self, api, user): for key in ['first_name', 'last_name', 'email', 'token']: assert key in user - rv = client.get('/uploads/', headers={ + rv = api.get('/uploads/', headers={ 'X-Token': user['token'] }) @@ -145,7 +146,7 @@ class TestAuth: @pytest.mark.parametrize('token, affiliation', [ ('test_token', dict(name='HU Berlin', address='Unter den Linden 6')), (None, None)]) - def test_put_user(self, client, postgres, admin_user_auth, token, affiliation): + def test_put_user(self, api, postgres, admin_user_auth, token, affiliation): data = dict( email='test@email.com', last_name='Tester', first_name='Testi', token=token, affiliation=affiliation, @@ -153,30 +154,30 @@ class 
TestAuth: data = {key: value for key, value in data.items() if value is not None} - rv = client.put( + rv = api.put( '/auth/user', headers=admin_user_auth, content_type='application/json', data=json.dumps(data)) assert rv.status_code == 200 - self.assert_user(client, json.loads(rv.data)) + self.assert_user(api, json.loads(rv.data)) - def test_put_user_admin_only(self, client, test_user_auth): - rv = client.put( + def test_put_user_admin_only(self, api, test_user_auth): + rv = api.put( '/auth/user', headers=test_user_auth, content_type='application/json', data=json.dumps(dict( email='test@email.com', last_name='Tester', first_name='Testi', password=bcrypt.encrypt('test_password', ident='2y')))) assert rv.status_code == 401 - def test_put_user_required_field(self, client, admin_user_auth): - rv = client.put( + def test_put_user_required_field(self, api, admin_user_auth): + rv = api.put( '/auth/user', headers=admin_user_auth, content_type='application/json', data=json.dumps(dict( email='test@email.com', password=bcrypt.encrypt('test_password', ident='2y')))) assert rv.status_code == 400 - def test_post_user(self, client, postgres, admin_user_auth): - rv = client.put( + def test_post_user(self, api, postgres, admin_user_auth): + rv = api.put( '/auth/user', headers=admin_user_auth, content_type='application/json', data=json.dumps(dict( email='test@email.com', last_name='Tester', first_name='Testi', @@ -185,13 +186,13 @@ class TestAuth: assert rv.status_code == 200 user = json.loads(rv.data) - rv = client.post( + rv = api.post( '/auth/user', headers={'X-Token': user['token']}, content_type='application/json', data=json.dumps(dict( last_name='Tester', first_name='Testi v.', password=bcrypt.encrypt('test_password_changed', ident='2y')))) assert rv.status_code == 200 - self.assert_user(client, json.loads(rv.data)) + self.assert_user(api, json.loads(rv.data)) class TestUploads: @@ -220,11 +221,11 @@ class TestUploads: return data - def assert_processing(self, client, test_user_auth, upload_id): + def assert_processing(self, api, test_user_auth, upload_id): upload_endpoint = '/uploads/%s' % upload_id # poll until completed - upload = self.block_until_completed(client, upload_id, test_user_auth) + upload = self.block_until_completed(api, upload_id, test_user_auth) assert len(upload['tasks']) == 4 assert upload['tasks_status'] == SUCCESS @@ -236,10 +237,10 @@ class TestUploads: assert calc['tasks_status'] == SUCCESS assert calc['current_task'] == 'archiving' assert len(calc['tasks']) == 3 - assert client.get('/archive/logs/%s/%s' % (calc['upload_id'], calc['calc_id']), headers=test_user_auth).status_code == 200 + assert api.get('/archive/logs/%s/%s' % (calc['upload_id'], calc['calc_id']), headers=test_user_auth).status_code == 200 if upload['calcs']['pagination']['total'] > 1: - rv = client.get('%s?page=2&per_page=1&order_by=tasks_status' % upload_endpoint, headers=test_user_auth) + rv = api.get('%s?page=2&per_page=1&order_by=tasks_status' % upload_endpoint, headers=test_user_auth) assert rv.status_code == 200 upload = self.assert_upload(rv.data) assert len(upload['calcs']['results']) == 1 @@ -248,13 +249,13 @@ class TestUploads: assert_upload_files(upload_with_metadata, files.StagingUploadFiles) assert_search_upload(upload_with_metadata, additional_keys=['atoms', 'system']) - def assert_published(self, client, test_user_auth, upload_id, proc_infra, with_coe_repo=True, metadata={}, publish_with_metadata: bool = True): - rv = client.get('/uploads/%s' % upload_id, headers=test_user_auth) + def 
assert_published(self, api, test_user_auth, upload_id, proc_infra, with_coe_repo=True, metadata={}, publish_with_metadata: bool = True): + rv = api.get('/uploads/%s' % upload_id, headers=test_user_auth) upload = self.assert_upload(rv.data) upload_with_metadata = get_upload_with_metadata(upload) - rv = client.post( + rv = api.post( '/uploads/%s' % upload_id, headers=test_user_auth, data=json.dumps(dict(operation='publish', metadata=metadata if publish_with_metadata else {})), @@ -268,7 +269,7 @@ class TestUploads: if with_coe_repo: additional_keys.append('pid') - self.block_until_completed(client, upload_id, test_user_auth) + self.block_until_completed(api, upload_id, test_user_auth) upload_proc = Upload.objects(upload_id=upload_id).first() assert upload_proc is not None assert upload_proc.published is True @@ -278,10 +279,10 @@ class TestUploads: assert_upload_files(upload_with_metadata, files.PublicUploadFiles, published=True) assert_search_upload(upload_with_metadata, additional_keys=additional_keys, published=True) - def block_until_completed(self, client, upload_id: str, test_user_auth): + def block_until_completed(self, api, upload_id: str, test_user_auth): while True: time.sleep(0.1) - rv = client.get('/uploads/%s' % upload_id, headers=test_user_auth) + rv = api.get('/uploads/%s' % upload_id, headers=test_user_auth) if rv.status_code == 200: upload = self.assert_upload(rv.data) if not upload['process_running'] and not upload['tasks_running']: @@ -293,36 +294,36 @@ class TestUploads: 'unexpected status code while blocking for upload processing: %s' % str(rv.status_code)) - def assert_upload_does_not_exist(self, client, upload_id: str, test_user_auth): - self.block_until_completed(client, upload_id, test_user_auth) + def assert_upload_does_not_exist(self, api, upload_id: str, test_user_auth): + self.block_until_completed(api, upload_id, test_user_auth) - rv = client.get('/uploads/%s' % upload_id, headers=test_user_auth) + rv = api.get('/uploads/%s' % upload_id, headers=test_user_auth) assert rv.status_code == 404 assert Upload.objects(upload_id=upload_id).first() is None assert Calc.objects(upload_id=upload_id).count() is 0 upload_files = UploadFiles.get(upload_id) assert upload_files is None or isinstance(upload_files, PublicUploadFiles) - def test_get_command(self, client, test_user_auth, no_warn): - rv = client.get('/uploads/command', headers=test_user_auth) + def test_get_command(self, api, test_user_auth, no_warn): + rv = api.get('/uploads/command', headers=test_user_auth) assert rv.status_code == 200 data = json.loads(rv.data) assert 'upload_command' in data assert 'upload_url' in data - def test_get_empty(self, client, test_user_auth, no_warn): - rv = client.get('/uploads/', headers=test_user_auth) + def test_get_empty(self, api, test_user_auth, no_warn): + rv = api.get('/uploads/', headers=test_user_auth) assert rv.status_code == 200 self.assert_uploads(rv.data, count=0) - def test_get_not_existing(self, client, test_user_auth, no_warn): - rv = client.get('/uploads/123456789012123456789012', headers=test_user_auth) + def test_get_not_existing(self, api, test_user_auth, no_warn): + rv = api.get('/uploads/123456789012123456789012', headers=test_user_auth) assert rv.status_code == 404 @pytest.mark.parametrize('mode', ['multipart', 'stream', 'local_path']) @pytest.mark.parametrize('name', [None, 'test_name']) - def test_put(self, client, test_user_auth, proc_infra, example_upload, mode, name, no_warn): + def test_put(self, api, test_user_auth, proc_infra, example_upload, mode, 
name, no_warn): file = example_upload if name: url = '/uploads/?name=%s' % name @@ -330,17 +331,17 @@ class TestUploads: url = '/uploads/' if mode == 'multipart': - rv = client.put( + rv = api.put( url, data=dict(file=(open(file, 'rb'), 'the_name')), headers=test_user_auth) if not name: name = 'the_name' elif mode == 'stream': with open(file, 'rb') as f: - rv = client.put(url, data=f.read(), headers=test_user_auth) + rv = api.put(url, data=f.read(), headers=test_user_auth) elif mode == 'local_path': url += '&' if name else '?' url += 'local_path=%s' % file - rv = client.put(url, headers=test_user_auth) + rv = api.put(url, headers=test_user_auth) else: assert False @@ -351,18 +352,19 @@ class TestUploads: upload = self.assert_upload(rv.data, name=name) assert upload['tasks_running'] - self.assert_processing(client, test_user_auth, upload['upload_id']) + self.assert_processing(api, test_user_auth, upload['upload_id']) - def test_upload_limit(self, client, mongo, test_user, test_user_auth, proc_infra): + @pytest.mark.timeout(config.tests.default_timeout) + def test_upload_limit(self, api, mongo, test_user, test_user_auth, proc_infra): for _ in range(0, config.services.upload_limit): Upload.create(user=test_user) file = example_file - rv = client.put('/uploads/?local_path=%s' % file, headers=test_user_auth) + rv = api.put('/uploads/?local_path=%s' % file, headers=test_user_auth) assert rv.status_code == 400 assert Upload.user_uploads(test_user).count() == config.services.upload_limit - def test_delete_not_existing(self, client, test_user_auth, no_warn): - rv = client.delete('/uploads/123456789012123456789012', headers=test_user_auth) + def test_delete_not_existing(self, api, test_user_auth, no_warn): + rv = api.delete('/uploads/123456789012123456789012', headers=test_user_auth) assert rv.status_code == 404 @pytest.fixture(scope='function') @@ -377,69 +379,69 @@ class TestUploads: yield True monkeypatch.setattr('nomad.processing.data.Upload.cleanup', old_cleanup) - def test_delete_published(self, client, test_user_auth, proc_infra, no_warn, with_publish_to_coe_repo): - rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth) + def test_delete_published(self, api, test_user_auth, proc_infra, no_warn, with_publish_to_coe_repo): + rv = api.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth) upload = self.assert_upload(rv.data) - self.assert_processing(client, test_user_auth, upload['upload_id']) - self.assert_published(client, test_user_auth, upload['upload_id'], proc_infra, with_coe_repo=with_publish_to_coe_repo) - rv = client.delete('/uploads/%s' % upload['upload_id'], headers=test_user_auth) + self.assert_processing(api, test_user_auth, upload['upload_id']) + self.assert_published(api, test_user_auth, upload['upload_id'], proc_infra, with_coe_repo=with_publish_to_coe_repo) + rv = api.delete('/uploads/%s' % upload['upload_id'], headers=test_user_auth) assert rv.status_code == 400 - def test_delete(self, client, test_user_auth, proc_infra, no_warn): - rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth) + def test_delete(self, api, test_user_auth, proc_infra, no_warn): + rv = api.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth) upload = self.assert_upload(rv.data) - self.assert_processing(client, test_user_auth, upload['upload_id']) - rv = client.delete('/uploads/%s' % upload['upload_id'], headers=test_user_auth) + self.assert_processing(api, test_user_auth, upload['upload_id']) + rv = 
api.delete('/uploads/%s' % upload['upload_id'], headers=test_user_auth) assert rv.status_code == 200 - self.assert_upload_does_not_exist(client, upload['upload_id'], test_user_auth) + self.assert_upload_does_not_exist(api, upload['upload_id'], test_user_auth) - def test_post_empty(self, client, test_user_auth, empty_upload, proc_infra, no_warn): - rv = client.put('/uploads/?local_path=%s' % empty_upload, headers=test_user_auth) + def test_post_empty(self, api, test_user_auth, empty_upload, proc_infra, no_warn): + rv = api.put('/uploads/?local_path=%s' % empty_upload, headers=test_user_auth) assert rv.status_code == 200 upload = self.assert_upload(rv.data) - self.assert_processing(client, test_user_auth, upload['upload_id']) - rv = client.post( + self.assert_processing(api, test_user_auth, upload['upload_id']) + rv = api.post( '/uploads/%s' % upload['upload_id'], headers=test_user_auth, data=json.dumps(dict(operation='publish')), content_type='application/json') assert rv.status_code == 400 - def test_post(self, client, test_user_auth, non_empty_example_upload, proc_infra, no_warn, with_publish_to_coe_repo): - rv = client.put('/uploads/?local_path=%s' % non_empty_example_upload, headers=test_user_auth) + def test_post(self, api, test_user_auth, non_empty_example_upload, proc_infra, no_warn, with_publish_to_coe_repo): + rv = api.put('/uploads/?local_path=%s' % non_empty_example_upload, headers=test_user_auth) assert rv.status_code == 200 upload = self.assert_upload(rv.data) - self.assert_processing(client, test_user_auth, upload['upload_id']) - self.assert_published(client, test_user_auth, upload['upload_id'], proc_infra, with_coe_repo=with_publish_to_coe_repo) + self.assert_processing(api, test_user_auth, upload['upload_id']) + self.assert_published(api, test_user_auth, upload['upload_id'], proc_infra, with_coe_repo=with_publish_to_coe_repo) # still visible - assert client.get('/uploads/%s' % upload['upload_id'], headers=test_user_auth).status_code == 200 + assert api.get('/uploads/%s' % upload['upload_id'], headers=test_user_auth).status_code == 200 # still listed with all=True - rv = client.get('/uploads/?state=all', headers=test_user_auth) + rv = api.get('/uploads/?state=all', headers=test_user_auth) assert rv.status_code == 200 data = json.loads(rv.data)['results'] assert len(data) > 0 assert any(item['upload_id'] == upload['upload_id'] for item in data) # not listed with all=False - rv = client.get('/uploads/', headers=test_user_auth) + rv = api.get('/uploads/', headers=test_user_auth) assert rv.status_code == 200 data = json.loads(rv.data)['results'] assert not any(item['upload_id'] == upload['upload_id'] for item in data) def test_post_metadata( - self, client, proc_infra, admin_user_auth, test_user_auth, test_user, + self, api, proc_infra, admin_user_auth, test_user_auth, test_user, other_test_user, no_warn, example_user_metadata): - rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth) + rv = api.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth) upload = self.assert_upload(rv.data) - self.assert_processing(client, test_user_auth, upload['upload_id']) + self.assert_processing(api, test_user_auth, upload['upload_id']) metadata = dict(**example_user_metadata) metadata['_upload_time'] = datetime.datetime.utcnow().isoformat() - self.assert_published(client, admin_user_auth, upload['upload_id'], proc_infra, metadata) + self.assert_published(api, admin_user_auth, upload['upload_id'], proc_infra, metadata) - def 
test_post_metadata_forbidden(self, client, proc_infra, test_user_auth, no_warn): - rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth) + def test_post_metadata_forbidden(self, api, proc_infra, test_user_auth, no_warn): + rv = api.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth) upload = self.assert_upload(rv.data) - self.assert_processing(client, test_user_auth, upload['upload_id']) - rv = client.post( + self.assert_processing(api, test_user_auth, upload['upload_id']) + rv = api.post( '/uploads/%s' % upload['upload_id'], headers=test_user_auth, data=json.dumps(dict(operation='publish', metadata=dict(_pid=256))), @@ -447,56 +449,56 @@ class TestUploads: assert rv.status_code == 401 def test_post_metadata_and_republish( - self, client, proc_infra, admin_user_auth, test_user_auth, test_user, + self, api, proc_infra, admin_user_auth, test_user_auth, test_user, other_test_user, no_warn, example_user_metadata): - rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth) + rv = api.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth) upload = self.assert_upload(rv.data) - self.assert_processing(client, test_user_auth, upload['upload_id']) + self.assert_processing(api, test_user_auth, upload['upload_id']) metadata = dict(**example_user_metadata) metadata['_upload_time'] = datetime.datetime.utcnow().isoformat() - self.assert_published(client, admin_user_auth, upload['upload_id'], proc_infra, metadata) - self.assert_published(client, admin_user_auth, upload['upload_id'], proc_infra, metadata, publish_with_metadata=False) + self.assert_published(api, admin_user_auth, upload['upload_id'], proc_infra, metadata) + self.assert_published(api, admin_user_auth, upload['upload_id'], proc_infra, metadata, publish_with_metadata=False) - def test_post_re_process(self, client, published, test_user_auth, monkeypatch): + def test_post_re_process(self, api, published, test_user_auth, monkeypatch): monkeypatch.setattr('nomad.config.version', 're_process_test_version') monkeypatch.setattr('nomad.config.commit', 're_process_test_commit') upload_id = published.upload_id - rv = client.post( + rv = api.post( '/uploads/%s' % upload_id, headers=test_user_auth, data=json.dumps(dict(operation='re-process')), content_type='application/json') assert rv.status_code == 200 - assert self.block_until_completed(client, upload_id, test_user_auth) is not None + assert self.block_until_completed(api, upload_id, test_user_auth) is not None # TODO validate metadata (or all input models in API for that matter) - # def test_post_bad_metadata(self, client, proc_infra, test_user_auth, postgres): - # rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth) + # def test_post_bad_metadata(self, api, proc_infra, test_user_auth, postgres): + # rv = api.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth) # upload = self.assert_upload(rv.data) - # self.assert_processing(client, test_user_auth, upload['upload_id']) - # rv = client.post( + # self.assert_processing(api, test_user_auth, upload['upload_id']) + # rv = api.post( # '/uploads/%s' % upload['upload_id'], # headers=test_user_auth, # data=json.dumps(dict(operation='publish', metadata=dict(doesnotexist='hi'))), # content_type='application/json') # assert rv.status_code == 400 - def test_potcar(self, client, proc_infra, test_user_auth): + def test_potcar(self, api, proc_infra, test_user_auth): # only the owner, shared with people are supposed to download the 
original potcar file example_file = 'tests/data/proc/examples_potcar.zip' - rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth) + rv = api.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth) upload = self.assert_upload(rv.data) upload_id = upload['upload_id'] - self.assert_processing(client, test_user_auth, upload_id) - self.assert_published(client, test_user_auth, upload_id, proc_infra, with_coe_repo=True) - rv = client.get('/raw/%s/examples_potcar/POTCAR' % upload_id) + self.assert_processing(api, test_user_auth, upload_id) + self.assert_published(api, test_user_auth, upload_id, proc_infra, with_coe_repo=True) + rv = api.get('/raw/%s/examples_potcar/POTCAR' % upload_id) assert rv.status_code == 401 - rv = client.get('/raw/%s/examples_potcar/POTCAR' % upload_id, headers=test_user_auth) + rv = api.get('/raw/%s/examples_potcar/POTCAR' % upload_id, headers=test_user_auth) assert rv.status_code == 200 - rv = client.get('/raw/%s/examples_potcar/POTCAR.stripped' % upload_id) + rv = api.get('/raw/%s/examples_potcar/POTCAR.stripped' % upload_id) assert rv.status_code == 200 @@ -526,10 +528,10 @@ class UploadFilesBasedTests: [False, False, False], # in public, public, for different user [False, False, None] # in public, public, for guest ], indirect=True) - def wrapper(self, client, test_data, *args, **kwargs): + def wrapper(self, api, test_data, *args, **kwargs): upload, authorized, auth_headers = test_data try: - func(self, client, upload, auth_headers, *args, **kwargs) + func(self, api, upload, auth_headers, *args, **kwargs) except AssertionError as assertion: assertion_str = str(assertion) if not authorized: @@ -552,9 +554,9 @@ class UploadFilesBasedTests: [True, None, True], # in staging [False, False, None], # in public ], indirect=True) - def wrapper(self, client, test_data, *args, **kwargs): + def wrapper(self, api, test_data, *args, **kwargs): upload, _, auth_headers = test_data - func(self, client, upload, auth_headers, *args, **kwargs) + func(self, api, upload, auth_headers, *args, **kwargs) UploadFilesBasedTests.fix_signature(func, wrapper) return wrapper @@ -600,32 +602,32 @@ class UploadFilesBasedTests: class TestArchive(UploadFilesBasedTests): @UploadFilesBasedTests.check_authorizaton - def test_get(self, client, upload, auth_headers): - rv = client.get('/archive/%s/0' % upload, headers=auth_headers) + def test_get(self, api, upload, auth_headers): + rv = api.get('/archive/%s/0' % upload, headers=auth_headers) assert rv.status_code == 200 assert json.loads(rv.data) is not None @UploadFilesBasedTests.ignore_authorization - def test_get_signed(self, client, upload, _, test_user_signature_token): - rv = client.get('/archive/%s/0?token=%s' % (upload, test_user_signature_token)) + def test_get_signed(self, api, upload, _, test_user_signature_token): + rv = api.get('/archive/%s/0?token=%s' % (upload, test_user_signature_token)) assert rv.status_code == 200 assert json.loads(rv.data) is not None @UploadFilesBasedTests.check_authorizaton - def test_get_calc_proc_log(self, client, upload, auth_headers): - rv = client.get('/archive/logs/%s/0' % upload, headers=auth_headers) + def test_get_calc_proc_log(self, api, upload, auth_headers): + rv = api.get('/archive/logs/%s/0' % upload, headers=auth_headers) assert rv.status_code == 200 assert len(rv.data) > 0 @UploadFilesBasedTests.ignore_authorization - def test_get_calc_proc_log_signed(self, client, upload, _, test_user_signature_token): - rv = client.get('/archive/logs/%s/0?token=%s' % (upload, 
test_user_signature_token)) + def test_get_calc_proc_log_signed(self, api, upload, _, test_user_signature_token): + rv = api.get('/archive/logs/%s/0?token=%s' % (upload, test_user_signature_token)) assert rv.status_code == 200 assert len(rv.data) > 0 @UploadFilesBasedTests.ignore_authorization - def test_get_non_existing_archive(self, client, upload, auth_headers): - rv = client.get('/archive/%s' % 'doesnt/exist', headers=auth_headers) + def test_get_non_existing_archive(self, api, upload, auth_headers): + rv = api.get('/archive/%s' % 'doesnt/exist', headers=auth_headers) assert rv.status_code == 404 @pytest.mark.parametrize('info', [ @@ -633,8 +635,8 @@ class TestArchive(UploadFilesBasedTests): 'all.experimental.nomadmetainfo.json', 'vasp.nomadmetainfo.json', 'mpes.nomadmetainfo.json']) - def test_get_metainfo(self, client, info): - rv = client.get('/archive/metainfo/%s' % info) + def test_get_metainfo(self, api, info): + rv = api.get('/archive/metainfo/%s' % info) assert rv.status_code == 200 metainfo = json.loads((rv.data)) assert len(metainfo) > 0 @@ -689,36 +691,36 @@ class TestRepo(): return data - def test_own_calc(self, client, example_elastic_calcs, no_warn, test_user_auth): - rv = client.get('/repo/0/1', headers=test_user_auth) + def test_own_calc(self, api, example_elastic_calcs, no_warn, test_user_auth): + rv = api.get('/repo/0/1', headers=test_user_auth) assert rv.status_code == 200 - def test_public_calc(self, client, example_elastic_calcs, no_warn, other_test_user_auth): - rv = client.get('/repo/0/1', headers=other_test_user_auth) + def test_public_calc(self, api, example_elastic_calcs, no_warn, other_test_user_auth): + rv = api.get('/repo/0/1', headers=other_test_user_auth) assert rv.status_code == 200 - def test_embargo_calc(self, client, example_elastic_calcs, no_warn, test_user_auth): - rv = client.get('/repo/0/4', headers=test_user_auth) + def test_embargo_calc(self, api, example_elastic_calcs, no_warn, test_user_auth): + rv = api.get('/repo/0/4', headers=test_user_auth) assert rv.status_code == 401 - def test_own_embargo_calc(self, client, example_elastic_calcs, no_warn, other_test_user_auth): - rv = client.get('/repo/0/4', headers=other_test_user_auth) + def test_own_embargo_calc(self, api, example_elastic_calcs, no_warn, other_test_user_auth): + rv = api.get('/repo/0/4', headers=other_test_user_auth) assert rv.status_code == 200 - def test_staging_calc(self, client, example_elastic_calcs, no_warn, test_user_auth): - rv = client.get('/repo/0/3', headers=test_user_auth) + def test_staging_calc(self, api, example_elastic_calcs, no_warn, test_user_auth): + rv = api.get('/repo/0/3', headers=test_user_auth) assert rv.status_code == 401 - def test_own_staging_calc(self, client, example_elastic_calcs, no_warn, other_test_user_auth): - rv = client.get('/repo/0/3', headers=other_test_user_auth) + def test_own_staging_calc(self, api, example_elastic_calcs, no_warn, other_test_user_auth): + rv = api.get('/repo/0/3', headers=other_test_user_auth) assert rv.status_code == 200 - def test_non_existing_calcs(self, client, example_elastic_calcs, test_user_auth): - rv = client.get('/repo/0/10', headers=test_user_auth) + def test_non_existing_calcs(self, api, example_elastic_calcs, test_user_auth): + rv = api.get('/repo/0/10', headers=test_user_auth) assert rv.status_code == 404 - def test_search_datasets(self, client, example_elastic_calcs, no_warn, other_test_user_auth): - rv = client.get('/repo/?owner=all&datasets=true', headers=other_test_user_auth) + def test_search_datasets(self, 
api, example_elastic_calcs, no_warn, other_test_user_auth): + rv = api.get('/repo/?owner=all&datasets=true', headers=other_test_user_auth) data = self.assert_search(rv, 4) datasets = data.get('datasets', None) @@ -737,9 +739,9 @@ class TestRepo(): (0, 'staging', 'test_user'), (1, 'staging', 'other_test_user') ]) - def test_search_owner(self, client, example_elastic_calcs, no_warn, test_user_auth, other_test_user_auth, calcs, owner, auth): + def test_search_owner(self, api, example_elastic_calcs, no_warn, test_user_auth, other_test_user_auth, calcs, owner, auth): auth = dict(none=None, test_user=test_user_auth, other_test_user=other_test_user_auth).get(auth) - rv = client.get('/repo/?owner=%s' % owner, headers=auth) + rv = api.get('/repo/?owner=%s' % owner, headers=auth) data = self.assert_search(rv, calcs) results = data.get('results', None) if calcs > 0: @@ -757,7 +759,7 @@ class TestRepo(): (1, today, None), (2, None, today) ]) - def test_search_time(self, client, example_elastic_calcs, no_warn, calcs, start, end): + def test_search_time(self, api, example_elastic_calcs, no_warn, calcs, start, end): query_string = '' if start is not None: query_string = 'from_time=%s' % rfc3339DateTime.format(start) @@ -768,7 +770,7 @@ class TestRepo(): if query_string != '': query_string = '?%s' % query_string - rv = client.get('/repo/%s' % query_string) + rv = api.get('/repo/%s' % query_string) self.assert_search(rv, calcs) @pytest.mark.parametrize('calcs, quantity, value', [ @@ -789,10 +791,10 @@ class TestRepo(): (2, 'quantities', ['wyckoff_letters_primitive', 'hall_number']), (0, 'quantities', 'dos') ]) - def test_search_parameters(self, client, example_elastic_calcs, no_warn, test_user_auth, calcs, quantity, value): + def test_search_parameters(self, api, example_elastic_calcs, no_warn, test_user_auth, calcs, quantity, value): query_string = urlencode({quantity: value, 'statistics': True}, doseq=True) - rv = client.get('/repo/?%s' % query_string, headers=test_user_auth) + rv = api.get('/repo/?%s' % query_string, headers=test_user_auth) logger.debug('run search quantities test', query_string=query_string) data = self.assert_search(rv, calcs) @@ -806,20 +808,20 @@ class TestRepo(): metrics_permutations = [[], search.metrics_names] + [[metric] for metric in search.metrics_names] - def test_search_admin(self, client, example_elastic_calcs, no_warn, admin_user_auth): - rv = client.get('/repo/?owner=admin', headers=admin_user_auth) + def test_search_admin(self, api, example_elastic_calcs, no_warn, admin_user_auth): + rv = api.get('/repo/?owner=admin', headers=admin_user_auth) self.assert_search(rv, 4) - def test_search_admin_auth(self, client, example_elastic_calcs, no_warn, test_user_auth): - rv = client.get('/repo/?owner=admin', headers=test_user_auth) + def test_search_admin_auth(self, api, example_elastic_calcs, no_warn, test_user_auth): + rv = api.get('/repo/?owner=admin', headers=test_user_auth) assert rv.status_code == 401 - rv = client.get('/repo/?owner=admin') + rv = api.get('/repo/?owner=admin') assert rv.status_code == 401 @pytest.mark.parametrize('metrics', metrics_permutations) - def test_search_total_metrics(self, client, example_elastic_calcs, no_warn, metrics): - rv = client.get('/repo/?%s' % urlencode(dict(metrics=metrics, statistics=True, datasets=True), doseq=True)) + def test_search_total_metrics(self, api, example_elastic_calcs, no_warn, metrics): + rv = api.get('/repo/?%s' % urlencode(dict(metrics=metrics, statistics=True, datasets=True), doseq=True)) assert rv.status_code == 200, 
str(rv.data) data = json.loads(rv.data) total_metrics = data.get('statistics', {}).get('total', {}).get('all', None) @@ -829,8 +831,8 @@ class TestRepo(): assert metric in total_metrics @pytest.mark.parametrize('metrics', metrics_permutations) - def test_search_aggregation_metrics(self, client, example_elastic_calcs, no_warn, metrics): - rv = client.get('/repo/?%s' % urlencode(dict(metrics=metrics, statistics=True, datasets=True), doseq=True)) + def test_search_aggregation_metrics(self, api, example_elastic_calcs, no_warn, metrics): + rv = api.get('/repo/?%s' % urlencode(dict(metrics=metrics, statistics=True, datasets=True), doseq=True)) assert rv.status_code == 200 data = json.loads(rv.data) for name, quantity in data.get('statistics').items(): @@ -842,16 +844,16 @@ class TestRepo(): else: assert len(metrics_result) == 1 # code_runs is the only metric for authors - def test_search_date_histogram(self, client, example_elastic_calcs, no_warn): - rv = client.get('/repo/?date_histogram=true&metrics=total_energies') + def test_search_date_histogram(self, api, example_elastic_calcs, no_warn): + rv = api.get('/repo/?date_histogram=true&metrics=total_energies') assert rv.status_code == 200 data = json.loads(rv.data) histogram = data.get('statistics').get('date_histogram') assert len(histogram) > 0 @pytest.mark.parametrize('n_results, page, per_page', [(2, 1, 5), (1, 1, 1), (0, 2, 3)]) - def test_search_pagination(self, client, example_elastic_calcs, no_warn, n_results, page, per_page): - rv = client.get('/repo/?page=%d&per_page=%d&statistics=true' % (page, per_page)) + def test_search_pagination(self, api, example_elastic_calcs, no_warn, n_results, page, per_page): + rv = api.get('/repo/?page=%d&per_page=%d&statistics=true' % (page, per_page)) assert rv.status_code == 200 data = json.loads(rv.data) results = data.get('results', None) @@ -863,8 +865,8 @@ class TestRepo(): ('1', 'formula', -1), ('2', 'formula', 1), ('2', 'basis_set', -1), ('1', 'basis_set', 1), (None, 'authors', -1)]) - def test_search_order(self, client, example_elastic_calcs, no_warn, first, order_by, order): - rv = client.get('/repo/?order_by=%s&order=%d' % (order_by, order)) + def test_search_order(self, api, example_elastic_calcs, no_warn, first, order_by, order): + rv = api.get('/repo/?order_by=%s&order=%d' % (order_by, order)) assert rv.status_code == 200 data = json.loads(rv.data) results = data.get('results', None) @@ -874,11 +876,11 @@ class TestRepo(): assert results[0]['calc_id'] == first @pytest.mark.parametrize('n_results, size', [(2, None), (2, 5), (1, 1)]) - def test_search_scroll(self, client, example_elastic_calcs, no_warn, n_results, size): + def test_search_scroll(self, api, example_elastic_calcs, no_warn, n_results, size): if size is not None: - rv = client.get('/repo/?scroll=1,&per_page=%d' % size) + rv = api.get('/repo/?scroll=1,&per_page=%d' % size) else: - rv = client.get('/repo/?scroll=1') + rv = api.get('/repo/?scroll=1') assert rv.status_code == 200 data = json.loads(rv.data) @@ -891,7 +893,7 @@ class TestRepo(): has_another_page = False while scroll_id is not None: - rv = client.get('/repo/?scroll=1&scroll_id=%s' % scroll_id) + rv = api.get('/repo/?scroll=1&scroll_id=%s' % scroll_id) data = json.loads(rv.data) scroll_id = data.get('scroll', {}).get('scroll_id', None) has_another_page |= len(data.get('results')) > 0 @@ -899,8 +901,8 @@ class TestRepo(): if n_results < 2: assert has_another_page - def test_search_user_authrequired(self, client, example_elastic_calcs, no_warn): - rv = 
client.get('/repo/?owner=user') + def test_search_user_authrequired(self, api, example_elastic_calcs, no_warn): + rv = api.get('/repo/?owner=user') assert rv.status_code == 401 @pytest.mark.parametrize('calcs, quantity, value', [ @@ -912,8 +914,8 @@ class TestRepo(): (2, 'files', 'test/mainfile.txt'), (0, 'quantities', 'dos') ]) - def test_quantity_search(self, client, example_elastic_calcs, no_warn, test_user_auth, calcs, quantity, value): - rv = client.get('/repo/%s' % quantity, headers=test_user_auth) + def test_quantity_search(self, api, example_elastic_calcs, no_warn, test_user_auth, calcs, quantity, value): + rv = api.get('/repo/%s' % quantity, headers=test_user_auth) assert rv.status_code == 200 data = json.loads(rv.data) values = data['quantity']['values'] @@ -923,8 +925,8 @@ class TestRepo(): else: assert 0 == calcs - def test_quantity_search_after(self, client, example_elastic_calcs, no_warn, test_user_auth): - rv = client.get('/repo/atoms?size=1') + def test_quantity_search_after(self, api, example_elastic_calcs, no_warn, test_user_auth): + rv = api.get('/repo/atoms?size=1') assert rv.status_code == 200 data = json.loads(rv.data) @@ -935,7 +937,7 @@ class TestRepo(): value = list(quantity['values'].keys())[0] while True: - rv = client.get('/repo/atoms?size=1&after=%s' % after) + rv = api.get('/repo/atoms?size=1&after=%s' % after) assert rv.status_code == 200 data = json.loads(rv.data) @@ -954,9 +956,9 @@ class TestRepo(): (3, True, True), (3, False, False), (4, True, True), (4, False, False)]) def test_resolve_pid( - self, client, example_elastic_calcs, other_test_user_auth, pid, with_login, + self, api, example_elastic_calcs, other_test_user_auth, pid, with_login, success, no_warn): - rv = client.get( + rv = api.get( '/repo/pid/%d' % pid, headers=other_test_user_auth if with_login else {}) assert rv.status_code == 200 if success else 404 @@ -967,64 +969,64 @@ class TestRepo(): class TestRaw(UploadFilesBasedTests): - def test_raw_file_from_calc(self, client, non_empty_processed, test_user_auth): + def test_raw_file_from_calc(self, api, non_empty_processed, test_user_auth): calc = list(non_empty_processed.calcs)[0] url = '/raw/calc/%s/%s/%s' % ( non_empty_processed.upload_id, calc.calc_id, os.path.basename(calc.mainfile)) - rv = client.get(url, headers=test_user_auth) + rv = api.get(url, headers=test_user_auth) assert rv.status_code == 200 assert len(rv.data) > 0 url = '/raw/calc/%s/%s/' % (non_empty_processed.upload_id, calc.calc_id) - rv = client.get(url, headers=test_user_auth) + rv = api.get(url, headers=test_user_auth) assert rv.status_code == 200 result = json.loads(rv.data) assert len(result['contents']) > 0 @UploadFilesBasedTests.check_authorizaton - def test_raw_file(self, client, upload, auth_headers): + def test_raw_file(self, api, upload, auth_headers): url = '/raw/%s/%s' % (upload, example_file_mainfile) - rv = client.get(url, headers=auth_headers) + rv = api.get(url, headers=auth_headers) assert rv.status_code == 200 assert len(rv.data) > 0 @UploadFilesBasedTests.check_authorizaton - def test_raw_file_partial(self, client, upload, auth_headers): + def test_raw_file_partial(self, api, upload, auth_headers): url = '/raw/%s/%s?offset=0&length=20' % (upload, example_file_mainfile) - rv = client.get(url, headers=auth_headers) + rv = api.get(url, headers=auth_headers) assert rv.status_code == 200 start_data = rv.data assert len(start_data) == 20 url = '/raw/%s/%s?offset=10&length=10' % (upload, example_file_mainfile) - rv = client.get(url, headers=auth_headers) + rv = 
api.get(url, headers=auth_headers) assert rv.status_code == 200 next_data = rv.data assert len(rv.data) == 10 assert start_data[10:] == next_data @UploadFilesBasedTests.ignore_authorization - def test_raw_file_signed(self, client, upload, _, test_user_signature_token): + def test_raw_file_signed(self, api, upload, _, test_user_signature_token): url = '/raw/%s/%s?token=%s' % (upload, example_file_mainfile, test_user_signature_token) - rv = client.get(url) + rv = api.get(url) assert rv.status_code == 200 assert len(rv.data) > 0 @UploadFilesBasedTests.ignore_authorization - def test_raw_file_missing_file(self, client, upload, auth_headers): + def test_raw_file_missing_file(self, api, upload, auth_headers): url = '/raw/%s/does/not/exist' % upload - rv = client.get(url, headers=auth_headers) + rv = api.get(url, headers=auth_headers) assert rv.status_code == 404 data = json.loads(rv.data) assert 'files' not in data @pytest.mark.parametrize('compress', [True, False]) @UploadFilesBasedTests.ignore_authorization - def test_raw_file_wildcard(self, client, upload, auth_headers, compress): + def test_raw_file_wildcard(self, api, upload, auth_headers, compress): url = '/raw/%s/examples*' % upload if compress: url = '%s?compress=1' % url - rv = client.get(url, headers=auth_headers) + rv = api.get(url, headers=auth_headers) assert rv.status_code == 200 assert len(rv.data) > 0 @@ -1033,25 +1035,25 @@ class TestRaw(UploadFilesBasedTests): assert len(zip_file.namelist()) == len(example_file_contents) @UploadFilesBasedTests.ignore_authorization - def test_raw_file_wildcard_missing(self, client, upload, auth_headers): + def test_raw_file_wildcard_missing(self, api, upload, auth_headers): url = '/raw/%s/does/not/exist*' % upload - rv = client.get(url, headers=auth_headers) + rv = api.get(url, headers=auth_headers) assert rv.status_code == 404 @UploadFilesBasedTests.ignore_authorization - def test_raw_file_missing_upload(self, client, upload, auth_headers): + def test_raw_file_missing_upload(self, api, upload, auth_headers): url = '/raw/doesnotexist/%s' % example_file_mainfile - rv = client.get(url, headers=auth_headers) + rv = api.get(url, headers=auth_headers) assert rv.status_code == 404 @pytest.mark.parametrize('compress', [True, False]) @UploadFilesBasedTests.check_authorizaton - def test_raw_files(self, client, upload, auth_headers, compress): + def test_raw_files(self, api, upload, auth_headers, compress): url = '/raw/%s?files=%s' % ( upload, ','.join(example_file_contents)) if compress: url = '%s&compress=1' % url - rv = client.get(url, headers=auth_headers) + rv = api.get(url, headers=auth_headers) assert rv.status_code == 200 assert len(rv.data) > 0 @@ -1060,9 +1062,9 @@ class TestRaw(UploadFilesBasedTests): assert len(zip_file.namelist()) == len(example_file_contents) @pytest.mark.parametrize('compress', [False, True]) - def test_raw_files_from_query_upload_id(self, client, non_empty_processed, test_user_auth, compress): + def test_raw_files_from_query_upload_id(self, api, non_empty_processed, test_user_auth, compress): url = '/raw/query?upload_id=%s&compress=%s' % (non_empty_processed.upload_id, 'true' if compress else 'false') - rv = client.get(url, headers=test_user_auth) + rv = api.get(url, headers=test_user_auth) assert rv.status_code == 200 assert len(rv.data) > 0 @@ -1074,10 +1076,10 @@ class TestRaw(UploadFilesBasedTests): {'atoms': 'Si'}, {'authors': 'Cooper, Sheldon'} ]) - def test_raw_files_from_query(self, client, processeds, test_user_auth, query_params): + def 
test_raw_files_from_query(self, api, processeds, test_user_auth, query_params): url = '/raw/query?%s' % urlencode(query_params) - rv = client.get(url, headers=test_user_auth) + rv = api.get(url, headers=test_user_auth) assert rv.status_code == 200 assert len(rv.data) > 0 @@ -1085,9 +1087,9 @@ class TestRaw(UploadFilesBasedTests): assert zip_file.testzip() is None assert len(zip_file.namelist()) == len(example_file_contents) * len(processeds) - def test_raw_files_from_empty_query(self, client, elastic): + def test_raw_files_from_empty_query(self, api, elastic): url = '/raw/query?upload_id=doesNotExist' - rv = client.get(url) + rv = api.get(url) assert rv.status_code == 200 assert len(rv.data) > 0 @@ -1096,10 +1098,10 @@ class TestRaw(UploadFilesBasedTests): assert len(zip_file.namelist()) == 0 @UploadFilesBasedTests.ignore_authorization - def test_raw_files_signed(self, client, upload, _, test_user_signature_token): + def test_raw_files_signed(self, api, upload, _, test_user_signature_token): url = '/raw/%s?files=%s&token=%s' % ( upload, ','.join(example_file_contents), test_user_signature_token) - rv = client.get(url) + rv = api.get(url) assert rv.status_code == 200 assert len(rv.data) > 0 @@ -1109,12 +1111,12 @@ class TestRaw(UploadFilesBasedTests): @pytest.mark.parametrize('compress', [True, False, None]) @UploadFilesBasedTests.check_authorizaton - def test_raw_files_post(self, client, upload, auth_headers, compress): + def test_raw_files_post(self, api, upload, auth_headers, compress): url = '/raw/%s' % upload data = dict(files=example_file_contents) if compress is not None: data.update(compress=compress) - rv = client.post(url, data=json.dumps(data), content_type='application/json', headers=auth_headers) + rv = api.post(url, data=json.dumps(data), content_type='application/json', headers=auth_headers) assert rv.status_code == 200 assert len(rv.data) > 0 @@ -1124,11 +1126,11 @@ class TestRaw(UploadFilesBasedTests): @pytest.mark.parametrize('compress', [True, False]) @UploadFilesBasedTests.ignore_authorization - def test_raw_files_missing_file(self, client, upload, auth_headers, compress): + def test_raw_files_missing_file(self, api, upload, auth_headers, compress): url = '/raw/%s?files=%s,missing/file.txt' % (upload, example_file_mainfile) if compress: url = '%s&compress=1' % url - rv = client.get(url, headers=auth_headers) + rv = api.get(url, headers=auth_headers) assert rv.status_code == 200 assert len(rv.data) > 0 @@ -1137,17 +1139,17 @@ class TestRaw(UploadFilesBasedTests): assert len(zip_file.namelist()) == 1 @UploadFilesBasedTests.ignore_authorization - def test_raw_files_missing_upload(self, client, upload, auth_headers): + def test_raw_files_missing_upload(self, api, upload, auth_headers): url = '/raw/doesnotexist?files=shoud/not/matter.txt' - rv = client.get(url, headers=auth_headers) + rv = api.get(url, headers=auth_headers) assert rv.status_code == 404 @pytest.mark.parametrize('path', ['examples_template', 'examples_template/']) @UploadFilesBasedTests.ignore_authorization - def test_raw_files_list(self, client, upload, auth_headers, path): + def test_raw_files_list(self, api, upload, auth_headers, path): url = '/raw/%s/%s' % (upload, path) - rv = client.get(url, headers=auth_headers) + rv = api.get(url, headers=auth_headers) assert rv.status_code == 200 data = json.loads(rv.data) @@ -1160,17 +1162,17 @@ class TestRaw(UploadFilesBasedTests): assert '1.aux' in list(content['name'] for content in data['contents']) @UploadFilesBasedTests.ignore_authorization - def 
test_raw_files_list_missing(self, client, upload, auth_headers):
+    def test_raw_files_list_missing(self, api, upload, auth_headers):
         url = '/raw/%s/examples_' % upload
-        rv = client.get(url, headers=auth_headers)
+        rv = api.get(url, headers=auth_headers)
         assert rv.status_code == 404


 class TestMirror:

-    def test_upload(self, client, published, admin_user_auth, no_warn):
+    def test_upload(self, api, published, admin_user_auth, no_warn):
         url = '/mirror/%s' % published.upload_id
-        rv = client.get(url, headers=admin_user_auth)
+        rv = api.get(url, headers=admin_user_auth)
         assert rv.status_code == 200

         data = json.loads(rv.data)
@@ -1180,17 +1182,11 @@ class TestMirror:
         assert len(data['calcs']) == len(published.calcs)
         assert data['upload_files_path'] == published.upload_files.os_path

-    def test_uploads(self, client, published, admin_user_auth, no_warn):
-        rv = client.post(
+    def test_uploads(self, api, published, admin_user_auth, no_warn):
+        rv = api.post(
             '/mirror/',
             content_type='application/json', data='{"query":{}}', headers=admin_user_auth)

         assert rv.status_code == 200, rv.data
         data = json.loads(rv.data)
         assert data[0]['upload_id'] == published.upload_id
-
-
-def test_docs(client):
-    rv = client.get('/docs/index.html')
-    rv = client.get('/docs/introduction.html')
-    assert rv.status_code == 200
diff --git a/tests/app/test_app.py b/tests/app/test_app.py
new file mode 100644
index 0000000000000000000000000000000000000000..2ede6169d152da80450fc270483d15354b6fc013
--- /dev/null
+++ b/tests/app/test_app.py
@@ -0,0 +1,81 @@
+
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ + +import json + +from tests.utils import assert_log +from tests.app import resource # pylint: disable=unused-import + + +class BlueprintClient(): + def __init__(self, app_client, blueprint_url_prefix): + self.app_client = app_client + self.blueprint_url_prefix = blueprint_url_prefix.strip('/') + + def _delegate(self, method, path, *args, **kwargs): + app_client_function = getattr(self.app_client, method) + prefixed_path = '/%s/%s' % (self.blueprint_url_prefix, path.lstrip('/')) + return app_client_function(prefixed_path, *args, **kwargs) + + def get(self, *args, **kwargs): + return self._delegate('get', *args, **kwargs) + + def post(self, *args, **kwargs): + return self._delegate('post', *args, **kwargs) + + def put(self, *args, **kwargs): + return self._delegate('put', *args, **kwargs) + + def delete(self, *args, **kwargs): + return self._delegate('delete', *args, **kwargs) + + +def test_alive(client): + rv = client.get('/alive') + assert rv.status_code == 200 + + +def test_internal_server_error_get(client, caplog): + rv = client.get('/api/test/ise?test_arg=value') + assert rv.status_code == 500 + record = assert_log(caplog, 'error', 'internal server error') + data = json.loads(record.message) + + assert data['blueprint'] == 'api' + assert data['endpoint'] == 'api.test_internal_server_error_resource' + assert data['method'] == 'GET' + assert data['args']['test_arg'] == 'value' + + +def test_internal_server_error_post(client, caplog): + rv = client.post( + '/api/test/ise', + content_type='application/json', + data=json.dumps(dict(test_arg='value'))) + assert rv.status_code == 500 + record = assert_log(caplog, 'error', 'internal server error') + data = json.loads(record.message) + + assert data['blueprint'] == 'api' + assert data['endpoint'] == 'api.test_internal_server_error_resource' + assert data['method'] == 'POST' + assert data['json']['test_arg'] == 'value' + + +def test_docs(client): + rv = client.get('/docs/index.html') + rv = client.get('/docs/introduction.html') + assert rv.status_code == 200 diff --git a/tests/conftest.py b/tests/conftest.py index d322b9f5a162a4cfee3c301883762e2b56775b0d..58c37f8b1a8129a189d8a6850a313c1ae89e3cd3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -29,7 +29,7 @@ import datetime import base64 from bravado.client import SwaggerClient -from nomad import config, infrastructure, parsing, processing, coe_repo, api +from nomad import config, infrastructure, parsing, processing, coe_repo, app from tests import test_parsing, test_normalizing from tests.processing import test_data as test_processing @@ -84,8 +84,8 @@ def raw_files(raw_files_infra): @pytest.fixture(scope='function') def client(mongo): - api.app.config['TESTING'] = True - client = api.app.test_client() + app.app.config['TESTING'] = True + client = app.app.test_client() yield client @@ -325,14 +325,14 @@ def admin_user_auth(admin_user: coe_repo.User): @pytest.fixture(scope='function') def bravado(client, postgres, test_user_auth): http_client = FlaskTestHttpClient(client, headers=test_user_auth) - return SwaggerClient.from_url('/swagger.json', http_client=http_client) + return SwaggerClient.from_url('/api/swagger.json', http_client=http_client) @pytest.fixture(scope='function') def admin_user_bravado_client(client, admin_user_auth, monkeypatch): def create_client(): http_client = FlaskTestHttpClient(client, headers=admin_user_auth) - return SwaggerClient.from_url('/swagger.json', http_client=http_client) + return SwaggerClient.from_url('/api/swagger.json', http_client=http_client) 
monkeypatch.setattr('nomad.cli.client.create_client', create_client)
@@ -341,7 +341,7 @@ def admin_user_bravado_client(client, admin_user_auth, monkeypatch):
 def test_user_bravado_client(client, test_user_auth, monkeypatch):
     def create_client():
         http_client = FlaskTestHttpClient(client, headers=test_user_auth)
-        return SwaggerClient.from_url('/swagger.json', http_client=http_client)
+        return SwaggerClient.from_url('/api/swagger.json', http_client=http_client)

     monkeypatch.setattr('nomad.cli.client.create_client', create_client)
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 9f9bb4cecf59aecad34128a0234ca127d48055e9..d9fd026d183d2255c23ae324a020e32746216e20 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -142,7 +142,7 @@ class TestClient:
     def test_local(self, client, published, admin_user_bravado_client, monkeypatch):
         def requests_get(url, stream, headers):
             assert stream
-            rv = client.get(url[url.index('/raw'):], headers=headers)
+            rv = client.get(url[url.index('/api/raw'):], headers=headers)
             assert rv.status_code == 200
             return utils.POPO(iter_content=lambda *args, **kwargs: [bytes(rv.data)])
diff --git a/tests/test_migration.py b/tests/test_migration.py
index f06d410569e9207b0144cdf9a76c2b6c38fc0ada..69aa97883526bbb56f5b294e51f02346528d6624 100644
--- a/tests/test_migration.py
+++ b/tests/test_migration.py
@@ -28,7 +28,6 @@ from nomad.infrastructure import repository_db_connection
 from tests.conftest import create_postgres_infra, create_auth_headers
 from tests.bravado_flask import FlaskTestHttpClient
-from tests.test_api import create_auth_headers
 import tests.utils as test_utils
 from tests.test_search import assert_search_upload
 from tests.test_files import assert_upload_files
@@ -194,7 +193,7 @@ def migrate_infra(migration, target_repo, proc_infra, client, monkeypatch):
     def create_client_for_user(auth):
         http_client = FlaskTestHttpClient(client, headers=auth)
-        return SwaggerClient.from_url('/swagger.json', http_client=http_client)
+        return SwaggerClient.from_url('/api/swagger.json', http_client=http_client)

     def create_client():
         return create_client_for_user(infra.admin_auth)
diff --git a/tests/utils.py b/tests/utils.py
index 30e6ce1a7a7cab27201131586b7bbc736630f8c2..67f8340fab0142e4b93a0ce48a260677e38091d0 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -17,9 +17,10 @@ from typing import Type
 import json
 from contextlib import contextmanager
+from logging import LogRecord


-def assert_log(caplog, level, event_part):
+def assert_log(caplog, level: str, event_part: str) -> LogRecord:
     """
     Assert whether a log message exists in the logs of the tests at a certain level.
@@ -35,14 +36,16 @@ def assert_log(caplog, level, event_part):
         contain this string.
     """
-    record_receieved = False
+    received_record = None
     for record in caplog.get_records(when='call'):
         if record.levelname == level:
             if (event_part in json.loads(record.msg)['event']):
-                record_receieved = True
+                received_record = record
+                # No need to look for more matches since we aren't counting matches.
                 break

-    assert(record_receieved)
+    assert received_record is not None
+
+    return received_record


 @contextmanager
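Note on the fixtures used in the test hunks above: the tests switch from the plain Flask `client` fixture to an `api` fixture that targets the blueprint mounted under `/api`, which is why the changes are almost entirely mechanical client-to-api renames. The `api` fixture definition itself is not part of this diff; the following is only a minimal sketch of how such a fixture could be composed from the `BlueprintClient` helper added in tests/app/test_app.py. The fixture name, its module placement, and the import path are assumptions, not the project's confirmed layout.

# Hypothetical sketch (e.g. in a tests/app/conftest.py): wrap the plain Flask
# test client so every request is prefixed with the /api blueprint path.
import pytest

from tests.app.test_app import BlueprintClient  # helper defined in this diff


@pytest.fixture(scope='function')
def api(client):
    # `client` is the existing Flask test client fixture from tests/conftest.py.
    # With the wrapper, api.get('/uploads/') issues a request to /api/uploads/,
    # matching the blueprint registered under the /api prefix.
    return BlueprintClient(client, '/api')

With a wrapper like this, the bodies of the existing tests can stay unchanged apart from the fixture name, and the same pattern extends to the other blueprints (e.g. /optimade or /docs) if they need their own test clients.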