Commit 73efe701 authored by Markus Scheidgen

Refined tests.

parent a1a97b70
Pipeline #43638 passed with stages in 17 minutes and 14 seconds
@@ -78,6 +78,7 @@ def setup_mongo():
     global mongo_client
     mongo_client = connect(db=config.mongo.db_name, host=config.mongo.host, port=config.mongo.port)
     logger.info('setup mongo connection')
+    return mongo_client


 def setup_elastic():
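Returning the client from setup_mongo() lets callers reuse the connection, e.g. to reset state between tests. A minimal sketch of the new usage (mongoengine's connect() returns a pymongo MongoClient, which supports drop_database):

from nomad import config, infrastructure

# the client returned by setup_mongo() can now be used directly,
# e.g. by the session-scoped test fixtures added later in this commit
client = infrastructure.setup_mongo()
client.drop_database(config.mongo.db_name)
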
@@ -194,7 +194,7 @@ class Proc(Document, metaclass=ProcMetaclass):

     def fail(self, *errors, log_level=logging.ERROR, **kwargs):
         """ Allows to fail the process. Takes strings or exceptions as args. """
-        assert self.tasks_running, 'Cannot fail a completed process.'
+        assert self.process_running or self.tasks_running, 'Cannot fail a completed process.'

         failed_with_exception = False
@@ -219,7 +219,7 @@ class Proc(Document, metaclass=ProcMetaclass):

     def warning(self, *warnings, log_level=logging.WARNING, **kwargs):
         """ Allows to save warnings. Takes strings or exceptions as args. """
-        assert self.tasks_running
+        assert self.process_running or self.tasks_running

         logger = self.get_logger(**kwargs)
@@ -239,7 +239,7 @@ class Calc(Proc):

         # index in search
         with utils.timer(logger, 'indexed', step='persist'):
-            search.Entry.from_calc_with_metadata(calc_with_metadata, published=False).persist()
+            search.Entry.from_calc_with_metadata(calc_with_metadata, published=False).save()

         # persist the archive
         with utils.timer(
@@ -16,9 +16,6 @@
 This module represents calculations in elastic search.
 """

-from elasticsearch.exceptions import ConflictError, ConnectionTimeout
-from datetime import datetime
-import time
 from elasticsearch_dsl import Document, InnerDoc, Keyword, Text, Date, \
     Nested, Boolean, Search
@@ -127,39 +124,7 @@ class Entry(Document):

     @classmethod
     def add_upload(cls, source: datamodel.UploadWithMetadata):
         for calc in source.calcs:
-            cls.from_calc_with_metadata(calc).save(op_type='create')
-
-    def persist(self, **kwargs):
-        """
-        Persist this entry to elastic search. Kwargs are passed to elastic search.
-
-        Raises:
-            AlreadyExists: If the calculation already exists in elastic search. We use
-                the elastic document lock here. The elastic document is IDed via the
-                ``calc_id``.
-        """
-        try:
-            # In practive es operation might fail due to timeout under heavy loads/
-            # bad configuration. Retries with a small delay is a pragmatic solution.
-            e_after_retries = None
-            for _ in range(0, 2):
-                try:
-                    self.save(op_type='create', **kwargs)
-                    e_after_retries = None
-                    break
-                except ConnectionTimeout as e:
-                    e_after_retries = e
-                    time.sleep(1)
-                except ConflictError as e:  # this should never happen, but happens
-                    e_after_retries = e
-                    time.sleep(1)
-                else:
-                    raise e
-            if e_after_retries is not None:
-                # if we had and exception and could not fix with retries, throw it
-                raise e_after_retries  # pylint: disable=E0702
-        except ConflictError:
-            raise AlreadyExists('Calculation %s/%s does already exist.' % (self.upload_id, self.calc_id))
+            cls.from_calc_with_metadata(calc).save()

     @classmethod
     def update_by_query(cls, upload_id, script):
@@ -191,14 +156,3 @@ class Entry(Document):

-    @property
-    def json_dict(self):
-        """ A json serializable dictionary representation. """
-        data = self.to_dict()
-
-        upload_time = data.get('upload_time', None)
-        if upload_time is not None and isinstance(upload_time, datetime):
-            data['upload_time'] = data['upload_time'].isoformat()
-
-        return {key: value for key, value in data.items() if value is not None}

 def es_search(body):
     """ Perform an elasticsearch and not elasticsearch_dsl search on the Calc index. """
     return infrastructure.elastic_client.search(index=config.elastic.index_name, body=body)
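The removed persist() relied on op_type='create', which makes Elasticsearch reject the write with a ConflictError when a document with the same id already exists; the plain save() used now indexes unconditionally, creating or overwriting. A minimal sketch of the difference, assuming a trivial elasticsearch_dsl document:

from elasticsearch.exceptions import ConflictError
from elasticsearch_dsl import Document, Keyword, connections

connections.create_connection(hosts=['localhost'])

class Sketch(Document):
    calc_id = Keyword()

    class Index:
        name = 'sketch'

doc = Sketch(meta={'id': 'calc-1'}, calc_id='calc-1')
doc.save()  # plain index operation: creates or overwrites
try:
    doc.save(op_type='create')  # create-only write: fails on an existing id
except ConflictError:
    pass
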
@@ -24,10 +24,17 @@ Otherwise the test submodules follow the names of the nomad code modules.

 from nomad import config

-# For convinience we test the api without path prefix.
-# This should be setup with a fixture with in conftest.py, but it will be too late.
-# After importing the api module, the config values have already been used and
+# This should be setup with fixtures within conftest.py, but that would be too late.
+# After importing the api/infrastructure module, the config values have already been used and
 # changing them afterwards does not change anything anymore.

+# For convenience we test the api without path prefix.
 services_config = config.services._asdict()
 services_config.update(api_base_path='')
 config.services = config.NomadServicesConfig(**services_config)

+# We use a mocked in-memory mongo version.
+mongo_config = config.mongo._asdict()
+mongo_config.update(host='mongomock://localhost')
+config.mongo = config.MongoConfig(**mongo_config)
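mongoengine resolves the mongomock:// host scheme to an in-memory mongomock client, so the test suite never needs a running MongoDB. A minimal sketch of the mechanism, assuming mongomock is installed:

from mongoengine import Document, StringField, connect

# the mongomock:// scheme backs the connection with an in-memory
# mongomock client instead of a real MongoDB server
connect('test_db', host='mongomock://localhost')

class Example(Document):
    name = StringField()

Example(name='x').save()
assert Example.objects.count() == 1
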
@@ -16,8 +16,6 @@ from typing import Tuple
 import pytest
 import logging
 from sqlalchemy.orm import Session
-from mongoengine import connect
-from mongoengine.connection import disconnect
 from contextlib import contextmanager
 from collections import namedtuple
 from smtpd import SMTPServer
@@ -79,7 +77,7 @@ def raw_files(raw_files_infra):

 @pytest.fixture(scope='function')
-def client(monkeysession):
+def client(monkeysession, mongo):
     api.app.config['TESTING'] = True

     client = api.app.test_client()
@@ -98,23 +96,32 @@ def celery_config():
     }


 @pytest.fixture(scope='session')
 def purged_app(celery_session_app):
     """
     Purges all pending tasks of the celery app before test. This is necessary to
     remove tasks from the queue that might be 'left over' from prior tests.
     """
     celery_session_app.control.purge()
     yield celery_session_app


 @pytest.fixture(scope='session')
 def worker(celery_session_worker):
     """ Provides a clean worker (no old tasks) per function. Waits for all tasks to be completed. """
     pass


-@pytest.fixture(scope='function')
-def mongo(monkeypatch):
-    """ Provides a cleaned mocked mongo per function. """
-    disconnect()
-    connection = connect('test_db', host='mongomock://localhost')
-    monkeypatch.setattr('nomad.infrastructure.setup_mongo', lambda **kwargs: None)
-    yield
-    connection.drop_database('test_db')
+@pytest.fixture(scope='session')
+def mongo_infra(monkeysession):
+    return infrastructure.setup_mongo()
+
+
+@pytest.fixture(scope='function')
+def mongo(mongo_infra):
+    """ Provides a cleaned mocked mongo per function. """
+    mongo_infra.drop_database('test_db')
+    return mongo_infra


 @pytest.fixture(scope='session')
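The split is a common pytest pattern: the session-scoped fixture pays the connection cost once, while the function-scoped fixture resets state cheaply before each test. A self-contained sketch of the same pattern with illustrative names (create_connection is a hypothetical setup helper):

import pytest

@pytest.fixture(scope='session')
def db_connection():
    # expensive setup runs once for the whole test session
    connection = create_connection()  # hypothetical helper
    yield connection
    connection.close()

@pytest.fixture(scope='function')
def db(db_connection):
    # cheap per-test reset instead of reconnecting every time
    db_connection.drop_database('test_db')
    return db_connection
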
@@ -62,7 +62,7 @@ def run_processing(uploaded_id: str, test_user) -> Upload:
     assert upload.current_task == 'uploading'

     upload.process_upload()  # pylint: disable=E1101
-    upload.block_until_complete(interval=.1)
+    upload.block_until_complete(interval=.01)

     return upload
@@ -105,7 +105,7 @@ def test_processing(processed, no_warn, mails):

 @pytest.mark.timeout(10)
-def test_processing_with_warning(raw_files, worker, test_user, with_warn):
+def test_processing_with_warning(proc_infra, test_user, with_warn):
     example_file = 'tests/data/proc/examples_with_warning_template.zip'
     example_upload_id = os.path.basename(example_file).replace('.zip', '')
     upload_files = ArchiveBasedStagingUploadFiles(example_upload_id, create=True)
@@ -116,7 +116,7 @@ def test_processing_with_warning(raw_files, worker, test_user, with_warn):

 @pytest.mark.timeout(10)
-def test_process_non_existing(worker, test_user, with_error):
+def test_process_non_existing(proc_infra, test_user, with_error):
     upload = run_processing('__does_not_exist', test_user)

     assert not upload.tasks_running
@pytest.mark.parametrize('task', ['extracting', 'parse_all', 'cleanup', 'parsing'])
@pytest.mark.timeout(10)
def test_task_failure(monkeypatch, uploaded, worker, task, test_user, with_error):
def test_task_failure(monkeypatch, uploaded, worker, task, proc_infra, test_user, with_error):
# mock the task method to through exceptions
if hasattr(Upload, task):
cls = Upload
......
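Mocking a task method so that it raises is a standard monkeypatch idiom; the test then asserts that processing ends in a failed state. A minimal self-contained sketch of the idiom, with illustrative names:

import pytest

class Upload:
    def extracting(self):
        pass  # stands in for the real task implementation

def test_task_failure(monkeypatch):
    def raise_error(self):
        raise Exception('provoked task failure')

    # replace the task method so processing fails deterministically
    monkeypatch.setattr(Upload, 'extracting', raise_error)

    with pytest.raises(Exception):
        Upload().extracting()
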
@@ -236,7 +236,7 @@ class TestUploads:

     @pytest.mark.parametrize('mode', ['multipart', 'stream', 'local_path'])
     @pytest.mark.parametrize('name', [None, 'test_name'])
-    def test_put(self, client, test_user_auth, proc_infra, example_upload, mode, name):
+    def test_put(self, client, test_user_auth, proc_infra, example_upload, mode, name, no_warn):
         file = example_upload
         if name:
             url = '/uploads/?name=%s' % name
@@ -281,7 +281,7 @@ class TestUploads:
         yield True
         monkeypatch.setattr('nomad.processing.data.Upload.cleanup', old_cleanup)

-    def test_delete_during_processing(self, client, test_user_auth, proc_infra, slow_processing):
+    def test_delete_during_processing(self, client, test_user_auth, proc_infra, slow_processing, no_warn):
         rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth)
         upload = self.assert_upload(rv.data)
         assert upload['tasks_running']
@@ -289,7 +289,7 @@ class TestUploads:
         assert rv.status_code == 400
         self.assert_processing(client, test_user_auth, upload['upload_id'])

-    def test_delete_unstaged(self, client, test_user_auth, proc_infra):
+    def test_delete_unstaged(self, client, test_user_auth, proc_infra, no_warn):
         rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth)
         upload = self.assert_upload(rv.data)
         self.assert_processing(client, test_user_auth, upload['upload_id'])
@@ -297,7 +297,7 @@ class TestUploads:
         rv = client.delete('/uploads/%s' % upload['upload_id'], headers=test_user_auth)
         assert rv.status_code == 404

-    def test_delete(self, client, test_user_auth, proc_infra):
+    def test_delete(self, client, test_user_auth, proc_infra, no_warn):
         rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth)
         upload = self.assert_upload(rv.data)
         self.assert_processing(client, test_user_auth, upload['upload_id'])
@@ -305,7 +305,7 @@ class TestUploads:
         assert rv.status_code == 200
         self.assert_upload_does_not_exist(client, upload['upload_id'], test_user_auth)

-    def test_post(self, client, test_user_auth, example_upload, proc_infra):
+    def test_post(self, client, test_user_auth, example_upload, proc_infra, no_warn):
         rv = client.put('/uploads/?local_path=%s' % example_upload, headers=test_user_auth)
         upload = self.assert_upload(rv.data)
         self.assert_processing(client, test_user_auth, upload['upload_id'])
@@ -313,14 +313,14 @@ class TestUploads:

     def test_post_metadata(
             self, client, proc_infra, admin_user_auth, test_user_auth, test_user,
-            other_test_user):
+            other_test_user, no_warn):
         rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth)
         upload = self.assert_upload(rv.data)
         self.assert_processing(client, test_user_auth, upload['upload_id'])
         metadata = dict(comment='test comment')
         self.assert_unstage(client, admin_user_auth, upload['upload_id'], proc_infra, metadata)

-    def test_post_metadata_forbidden(self, client, proc_infra, test_user_auth):
+    def test_post_metadata_forbidden(self, client, proc_infra, test_user_auth, no_warn):
         rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth)
         upload = self.assert_upload(rv.data)
         self.assert_processing(client, test_user_auth, upload['upload_id'])
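Most tests in this commit gain a no_warn fixture parameter; its implementation is not part of this diff. A plausible sketch of such a fixture, assuming it inspects captured log records and fails the test if anything at warning level or above was logged (the mechanism shown here is an assumption):

import logging
import pytest

@pytest.fixture(scope='function')
def no_warn(caplog):
    # assumption: the real fixture's mechanism is not shown in this diff
    yield caplog
    for record in caplog.get_records('call'):
        assert record.levelno < logging.WARNING, record.getMessage()
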
@@ -19,11 +19,11 @@ from nomad.processing import SUCCESS
 from tests.test_files import example_file, create_public_upload


-def test_get_upload_command(bravado):
+def test_get_upload_command(bravado, no_warn):
     assert bravado.uploads.get_upload_command().response().result.upload_command is not None


-def test_upload(bravado, proc_infra):
+def test_upload(bravado, proc_infra, no_warn):
     with open(example_file, 'rb') as f:
         upload = bravado.uploads.upload(file=f, name='test_upload').response().result
@@ -34,7 +34,7 @@ def test_upload(bravado, proc_infra):
     assert upload.tasks_status == SUCCESS


-def test_get_repo_calc(bravado, raw_files):
+def test_get_repo_calc(bravado, raw_files, no_warn):
     create_public_upload('test_upload', 'pp')
     repo = bravado.repo.get_repo_calc(upload_id='test_upload', calc_id='0').response().result
     assert repo is not None
@@ -46,7 +46,7 @@ def test_index_upload(elastic, processed: processing.Upload):

 def create_entry(calc_with_metadata: datamodel.CalcWithMetadata):
-    search.Entry.from_calc_with_metadata(calc_with_metadata).save(op_type='create')
+    search.Entry.from_calc_with_metadata(calc_with_metadata).save()
     assert_entry(calc_with_metadata.calc_id)