Commit 41a27005 authored by Markus Scheidgen

Merge branch 'v0.10.11' into 'master'

Merge for release v0.10.11

See merge request !497
parents 89f18a6b 0dc3fdd0
Pipeline #120136 passed with stages in 14 minutes and 54 seconds
@@ -2,5 +2,11 @@ Markus Scheidgen <markus.scheidgen@gmail.com>
Lauri Himanen <lauri.himanen@gmail.com>
Alvin Noe Ladines <ladinesalvinnoe@gmail.com>
Speckhard, Daniel <dts@physik.hu-berlin.de>
Luigi Sbailo <sbailo@fhi-berlin.mpg.de>
David Sikter <david.sikter@physik.hu-berlin.de>
Mohammad Nakhaee <mohammad.nakhaee.1@gmail.com>
Maja-Olivia Lenz <lenz@fhi-berlin.mpg.de>
Martin Kuban <kuban@physik.hu-berlin.de>
Cuauhtémoc Salazar <temok@physik.hu-berlin.de>
\ No newline at end of file
Cuauhtémoc Salazar <temok@physik.hu-berlin.de>
Aviral Vaid <aviral@fhi-berlin.mpg.de>
James Kermode <j.r.kermode@warwick.ac.uk>
\ No newline at end of file
Subproject commit 4f6961f9e424cc739ffcbc7aac0928b3363cec5c
Subproject commit b5ea25b92f286ad77b4011051cc6a7a1d494036f
# type: ignore
# Query the archives of VASP entries that contain none of the listed elements,
# requesting the total energy of the last calculation and the reduced bulk
# composition of the last system for each entry.
import requests
import json
response = requests.post(
'http://nomad-lab.eu/prod/rae/api/v1/entries/archive/query', json={
'query': {
'and': [
{
'dft.code_name': 'VASP',
},
{
'not': {
'atoms': {
'any': ["H", "C", "Li", "Na", "K", "Rb", "Cs"]
}
}
}
]
},
'pagination': {
'page_size': 10,
'page_after_value': '----9KNOtIZc9bDFEWxgjeSRsJrC'
},
'required': {
'section_run': {
'section_single_configuration_calculation[-1]': {
'energy_total': '*'
},
'section_system[-1]': {
'chemical_composition_bulk_reduced': '*'
}
}
}
})
print(json.dumps(response.json(), indent=2))
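The page_after_value above is a pagination cursor copied from an earlier response. A minimal sketch of walking all result pages, assuming the v1 response reports the follow-up cursor as pagination.next_page_after_value (an assumption, not shown in this diff):

# Hedged pagination sketch; 'next_page_after_value' is assumed to be echoed
# back in the response's pagination block.
import requests

page_after_value = None
while True:
    pagination = {'page_size': 10}
    if page_after_value is not None:
        pagination['page_after_value'] = page_after_value
    response = requests.post(
        'http://nomad-lab.eu/prod/rae/api/v1/entries/archive/query',
        json={'query': {'dft.code_name': 'VASP'}, 'pagination': pagination})
    page_after_value = response.json().get('pagination', {}).get('next_page_after_value')
    if page_after_value is None:
        break  # no further pages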
# type: ignore
# The same query against the entries endpoint; required.include restricts the
# returned metadata to the formula quantities.
import requests
import json
response = requests.post(
'http://nomad-lab.eu/prod/rae/api/v1/entries/query', json={
'query': {
'and': [
{
'dft.code_name': 'VASP',
},
{
'not': {
'atoms': {
'any': ["H", "C", "Li", "Na", "K", "Rb", "Cs"]
}
}
}
]
},
'required': {
'include': [
'formula',
'encyclopedia.material.formula',
'encyclopedia.material.formula_reduced'
]
},
'pagination': {
'page_size': 10,
'page_after_value': '----9KNOtIZc9bDFEWxgjeSRsJrC'
}
})
print(json.dumps(response.json(), indent=2))
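For illustration, a sketch of reading the requested formula quantities back out of this response; that the hits sit in a top-level 'data' list is an assumption about the v1 response format, not something this diff shows:

# Hypothetical post-processing; the 'data' key and the hit layout are assumptions.
for entry in response.json().get('data', []):
    material = entry.get('encyclopedia', {}).get('material', {})
    print(entry.get('formula'), material.get('formula'), material.get('formula_reduced'))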
@@ -9,22 +9,30 @@ from nomad.metainfo import units
query = ArchiveQuery(
# url='http://nomad-lab.eu/prod/rae/beta/api',
query={
'dft.code_name': 'VASP'
'$and': {
'dft.code_name': 'VASP',
'$not': {
'atoms': ["Ti", "O"]
}
}
},
required={
'section_run': {
'section_single_configuration_calculation': '*',
'section_single_configuration_calculation': {
'energy_total': '*'
},
'section_system': '*'
}
},
per_page=10,
max=100)
max=None)
print(query)
for i, result in enumerate(query):
if i < 10:
calc = result.section_run[0].section_single_configuration_calculation[-1]
energy = calc.energy_total
formula = calc.single_configuration_calculation_to_system_ref.chemical_composition_reduced
print('%s: energy %s' % (formula, energy.to(units.hartree)))
# for i, result in enumerate(query):
# if i < 10:
# calc = result.section_run[0].section_single_configuration_calculation[-1]
# energy = calc.energy_total
# formula = calc.single_configuration_calculation_to_system_ref.chemical_composition_reduced
# print('%s: energy %s' % (formula, energy.to(units.hartree)))
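Since the consumption loop is commented out above, a hedged sketch of how the results could still be iterated, mirroring that commented code and assuming the required sections above cover the accessed quantities:

# Mirrors the commented-out loop; assumes ArchiveQuery yields archive objects
# whose referenced sections were included via 'required' above.
for i, result in enumerate(query):
    if i >= 10:
        break
    calc = result.section_run[0].section_single_configuration_calculation[-1]
    formula = calc.single_configuration_calculation_to_system_ref.chemical_composition_reduced
    print('%s: energy %s' % (formula, calc.energy_total.to(units.hartree)))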
{
"name": "nomad-fair-gui",
"version": "0.10.10",
"version": "0.10.11",
"commit": "e98694e",
"private": true,
"workspaces": [
......
@@ -6,7 +6,7 @@ window.nomadEnv = {
'appBase': 'http://localhost:8000/fairdi/nomad/latest',
'debug': false,
'version': {
'label': '0.10.10',
'label': '0.10.11',
'isBeta': false,
'isTest': true,
'usesBetaData': true,
......
@@ -5,7 +5,7 @@ global.nomadEnv = {
'appBase': 'http://nomad-lab.eu/prod/rae/beta',
'debug': false,
'version': {
'label': '0.10.10',
'label': '0.10.11',
'isBeta': false,
'isTest': true,
'usesBetaData': true,
......
@@ -10054,7 +10054,7 @@ node-releases@^1.1.52, node-releases@^1.1.69:
integrity sha512-DGIjo79VDEyAnRlfSqYTsy+yoHd2IOjJiKUozD2MV2D85Vso6Bug56mb9tT/fY5Urt0iqk01H7x+llAruDR2zA==
 
"nomad-fair-gui@file:.":
version "0.10.10"
version "0.10.11"
dependencies:
"@fontsource/material-icons" "^4.2.1"
"@fontsource/titillium-web" "^4.2.2"
......
@@ -508,11 +508,15 @@ def edit(parsed_query: Dict[str, Any], mongo_update: Dict[str, Any] = None, re_i
upload_files = files.UploadFiles.get(upload_id, is_authorized=lambda: True)
upload_files_cache[upload_id] = upload_files
entry_metadata = calc.entry_metadata(upload_files)
entry = entry_metadata.a_elastic.create_index_entry().to_dict(include_meta=True)
entry['_op_type'] = 'index'
try:
entry_metadata = calc.entry_metadata(upload_files)
entry = entry_metadata.a_elastic.create_index_entry().to_dict(include_meta=True)
entry['_op_type'] = 'index'
yield entry
yield entry
except Exception as e:
common.logger.error('edit repo could not create index doc', exc_info=e)
for upload_files in upload_files_cache.values():
upload_files.close()
......
@@ -405,8 +405,11 @@ def re_pack(ctx, uploads, parallel: int):
@click.argument('UPLOADS', nargs=-1)
@click.option('--dry', is_flag=True, help='Just check, do nothing.')
@click.option('-f', '--force', is_flag=True, help='Ignore warnings and perform the operation regardless.')
@click.option('-q', '--quiet', is_flag=True, help='No output (only logs).')
@click.option('--upload-ids', is_flag=True, help='Print uploads with errors.')
@click.option('--label', type=str, help='A label to label log entries with.')
@click.pass_context
def prepare_migration(ctx, uploads, dry, force):
def prepare_migration(ctx, uploads, dry, force, label, quiet, upload_ids):
'''
Removes one of the raw files, either public or restricted depending on the embargo.
Files that need to be removed are saved as `quarantined` in the upload folder.
@@ -415,12 +418,24 @@ def prepare_migration(ctx, uploads, dry, force):
import os.path
import os
logger = utils.get_logger(__name__)
if label:
logger = logger.bind(label=label)
_, uploads = query_uploads(ctx, uploads)
for upload in uploads:
print(f'Preparing {upload.upload_id} for migration ...')
def log_event(event: str, error: bool = False, **kwargs):
if not quiet:
print(f' {"!!! " if error else ""}{event}', *[value for value in kwargs.values()])
method = getattr(logger, 'error' if error else 'info')
method(event, upload_id=upload.upload_id, **kwargs)
if not quiet:
print(f'Preparing {upload.upload_id} for migration ...')
if not upload.published:
print(' upload is not published, nothing to do')
log_event('upload is not published, nothing to do')
break
with_embargo_values: typing.List[bool] = []
@@ -431,7 +446,15 @@ def prepare_migration(ctx, uploads, dry, force):
with_embargo_values.append(with_embargo_value)
if len(with_embargo_values) > 1:
print(' !!! inconsistent upload !!!')
if upload_ids:
print(upload.upload_id)
log_event('inconsistent upload', error=True)
break
if len(with_embargo_values) == 0:
if upload_ids:
print(upload.upload_id)
log_event('upload with no indexed entries', error=True)
break
with_embargo = with_embargo_values[0]
@@ -445,25 +468,29 @@ def prepare_migration(ctx, uploads, dry, force):
to_stay = upload_files._raw_file_object(access)
if not to_move.exists():
print(' obsolete raw.zip was already removed', upload.upload_id, to_move.os_path)
log_event('obsolete raw.zip was already removed', path=to_move.os_path)
elif to_stay.size < to_move.size and not force:
print(' !!! likely inconsistent pack !!!')
if upload_ids:
print(upload.upload_id)
log_event('likely inconsistent pack', error=True)
elif to_move.size == 22:
if not dry:
to_move.delete()
print(' removed empty zip', upload.upload_id, to_move.os_path)
log_event('removed empty zip', path=to_move.os_path)
elif with_embargo and not force:
print(' !!! embargo upload with non empty public file !!!')
if upload_ids:
print(upload.upload_id)
log_event('embargo upload with non empty public file', error=True)
else:
if not dry:
target = upload_files._raw_file_object('quarantined')
assert not target.exists()
os.rename(to_move.os_path, target.os_path)
print(' quarantined', upload.upload_id, to_move.os_path)
log_event('quarantined file', path=to_move.os_path)
@uploads.command(help='Moves certain files from public or restricted to quarantine in published uploads.')
@@ -557,3 +584,57 @@ def stop(ctx, uploads, calcs: bool, kill: bool, no_celery: bool):
stop_all(proc.Calc.objects(running_query))
if not calcs:
stop_all(proc.Upload.objects(running_query))
@uploads.group(help='Check certain integrity criteria')
@click.pass_context
def integrity(ctx):
pass
@integrity.command(help='Uploads that have datasets with DOIs that do not exist.')
@click.argument('UPLOADS', nargs=-1)
@click.pass_context
def dois(ctx, uploads):
import sys
from nomad.processing import Calc
from nomad.datamodel import Dataset
from nomad.doi import DOI
from nomad.search import SearchRequest
_, uploads = query_uploads(ctx, uploads)
for upload in uploads:
dataset_ids = Calc._get_collection().distinct(
'metadata.datasets',
dict(upload_id=upload.upload_id))
for dataset_id in dataset_ids:
dataset: Dataset = Dataset.m_def.a_mongo.objects(dataset_id=dataset_id).first()
if dataset is None:
print(f'ERROR: dataset does not exist {dataset_id}, seen in upload {upload.upload_id}', file=sys.stderr)
print(upload.upload_id)
continue
if dataset.doi is not None:
doi = DOI.objects(doi=dataset.doi).first()
if doi is None:
continue
results = SearchRequest() \
.search_parameters(upload_id=upload.upload_id, dataset_id=dataset_id) \
.include('datasets') \
.execute_paginated(per_page=1)
if results['total'] == 0:
print(f'WARNING: dataset {dataset_id} not in index for upload {upload.upload_id}', file=sys.stderr)
print(upload.upload_id)
continue
if not any([
dataset.get('doi') == doi.doi
for dataset in results['results'][0]['datasets']]):
print(f'WARNING: DOI of dataset {dataset_id} not in index for upload {upload.upload_id}', file=sys.stderr)
print(upload.upload_id)
@@ -284,7 +284,7 @@ datacite = NomadConfig(
)
meta = NomadConfig(
version='0.10.10',
version='0.10.11',
commit=gitinfo.commit,
release='devel',
deployment='standard',
......
@@ -92,7 +92,7 @@ from .ems import EMSMetadata
from .qcms import QCMSMetadata
from .datamodel import (
Dataset, User, Author, EditableUserMetadata, UserProvidableMetadata, OasisMetadata,
MongoMetadata, EntryMetadata, EntryArchive)
MongoMetadata, EntryMetadata, EntryArchive, user_reference, author_reference)
from .optimade import OptimadeEntry, Species
from .metainfo import m_env
......
@@ -43,6 +43,11 @@ _entry_metadata_defaults = {
if quantity.default not in [None, [], False, 0]
}
_all_author_quantities = [
quantity.name
for quantity in datamodel.EntryMetadata.m_def.all_quantities.values()
if quantity.type in [datamodel.user_reference, datamodel.author_reference]]
def _es_to_entry_dict(hit, required: MetadataRequired) -> Dict[str, Any]:
'''
@@ -61,6 +66,16 @@ def _es_to_entry_dict(hit, required: MetadataRequired) -> Dict[str, Any]:
entry_dict[key] = value
for author_quantity in _all_author_quantities:
authors = entry_dict.get(author_quantity)
if authors is None:
continue
if isinstance(authors, dict):
authors = [authors]
for author in authors:
if 'email' in author:
del author['email']
return entry_dict
@@ -128,10 +143,17 @@ def index_all(calcs: Iterable[datamodel.EntryMetadata], do_refresh=True) -> None
'''
def elastic_updates():
for calc in calcs:
entry = calc.a_elastic.create_index_entry()
entry = entry.to_dict(include_meta=True)
entry['_op_type'] = 'index'
yield entry
try:
entry = calc.a_elastic.create_index_entry()
entry = entry.to_dict(include_meta=True)
entry['_op_type'] = 'index'
yield entry
except Exception as e:
utils.get_logger(__name__).error(
'could not create index doc', exc_info=e,
upload_id=calc.upload_id, calc_id=calc.calc_id)
_, failed = elasticsearch.helpers.bulk(infrastructure.elastic_client, elastic_updates(), stats_only=True)
......
@@ -151,7 +151,7 @@ class LogstashFormatter(logstash.formatter.LogstashFormatterBase):
if record.name.startswith('nomad'):
for key, value in structlog.items():
if key in ('event', 'stack_info', 'id', 'timestamp'):
if key in ('event', 'stack_info', 'id', 'timestamp', 'path'):
continue
elif key == 'exception':
exception_trace = value.strip('\n')
......
apiVersion: v1
appVersion: "0.10.10"
appVersion: "0.10.11"
description: A Helm chart for Kubernetes that only runs nomad services and uses externally hosted databases.
name: nomad
version: 0.10.10
version: 0.10.11