Commit 9c6f6c7b authored by Markus Scheidgen

Merge branch 'v0.10.0-rc1' into v0.10.0-rc2

parents c5f9bb25 410b5f04
Pipeline #95375 passed with stages in 23 minutes and 27 seconds
@@ -34,3 +34,5 @@ parser.osio.log
gui/src/metainfo.json
gui/src/searchQuantities.json
examples/workdir/
gunicorn.log.conf
nomad/gitinfo.py
@@ -147,6 +147,8 @@ dev setup tests:
- pip install --upgrade pip
- ./setup.sh
- python -m nomad.cli parse tests/data/parsers/vasp/vasp.xml
- python -c "import nomad.app"
- python -c "import nomad.processing"
except:
refs:
- tags
Subproject commit 562ae7d26fa108c7c31ebe38775c767aba1e642c
Subproject commit f55b5ad7826793c20d93a73fab01d028fbb44b00
Subproject commit e2c97b91a97ff997b41a99df9595ba6faea15f36
Subproject commit 52ac697cac040c7d37e960a5b529f8bc00c49f39
Subproject commit d4a5d39b6c012807d71597a3e695295e51b4eedd
Subproject commit 8b0945be756a705daf48f225f7c821f2eecc631a
Subproject commit 7533d541798b110f17873f640b726cf9d7c6302e
Subproject commit 6c8d32cd0107b0cfda9890c82240d5309c568e1f
Subproject commit 9246d3e496073282aa56b920e6d075a97829ffce
Subproject commit 18a179b508a89cfe2074d634e70448c6c5d27f2b
Subproject commit f9870d4dfb52c2a89a90ef1497cef45eac59a8f7
Subproject commit 956c915fe6e789103a6f793fd81d817283c93d0d
Subproject commit a32a1499b6aa163b17fe01fc8d324b55779418ec
Subproject commit 2325478e13a622a04642519d9a69f0da5eaa5e65
Subproject commit 7a9433b319a9a01445fc4542e3ba1f5258c5698c
Subproject commit 261655c3aa762ba6b2066c32e51303db48d5f4d7
Subproject commit c09f64143b6df098bd06882465fb9e4d29c44578
Subproject commit ce55910cabb529b7c0751017c5f10e803e2594a3
Subproject commit 4c35bae7f2834e9657eea438c0a98f8810ac415b
Subproject commit b6b2b661b29e363c41428a27ede19dcde77f63c9
@@ -162,18 +162,11 @@ yarn start
To run the tests some additional settings and files are necessary that are not part
of the code base.
First you need to create a `nomad.yaml` with the admin password for the user management
system:
```yaml
keycloak:
password: <the-password>
```
Secondly, you need to provide the `springer.msg` Springer materials database. It can
First, you need to provide the `springer.msg` Springer materials database. It can
be copied from `/nomad/fairdi/db/data/springer.msg` on our servers and should
be placed at `nomad/normalizing/data/springer.msg`.
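As a sketch (only the path on our servers is given above, so the host below is a placeholder):
```sh
# Placeholder host; substitute one of the NOMAD servers mentioned above.
scp <our-server>:/nomad/fairdi/db/data/springer.msg nomad/normalizing/data/springer.msg
```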
Thirdly, you have to provide static files to serve the docs and NOMAD distribution:
Second, you have to provide static files to serve the docs and NOMAD distribution:
```sh
cd docs
make html
@@ -66,7 +66,7 @@ def wrap_positions(
cell: NDArray[Any] = None,
pbc: Union[bool, NDArray[Any]] = True,
center: NDArray[Any] = [0.5, 0.5, 0.5],
eps: float = 1e-7) -> NDArray[Any]:
eps: float = 1e-12) -> NDArray[Any]:
"""Wraps the given position so that they are within the unit cell. If no
cell is given, scaled positions are assumed. For wrapping cartesian
positions you also need to provide the cell.
@@ -94,13 +94,14 @@ def wrap_positions(
if cell is None:
fractional = positions
else:
fractional = to_scaled(positions) - shift
fractional = to_scaled(positions, cell)
fractional -= shift
for i, periodic in enumerate(pbc):
if periodic:
fractional[:, i] %= 1.0
fractional[:, i] += shift[i]
if cell:
fractional[:, i] += shift[i]
if cell is not None:
return np.dot(fractional, cell)
else:
return fractional
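The docstring above describes the intended wrapping behavior. As a minimal usage sketch (the import path `nomad.atomutils` is an assumption, since the file name is not shown in this diff):
```python
# Sketch only: illustrates the documented behavior of wrap_positions.
# The import path is an assumption; the diff does not name the module.
import numpy as np
from nomad.atomutils import wrap_positions

# Scaled (fractional) positions, no cell given: values wrap into [0, 1).
scaled = np.array([[1.2, -0.3, 0.5]])
print(wrap_positions(scaled, pbc=[True, True, True]))
# expected: approximately [[0.2, 0.7, 0.5]]

# Cartesian positions with an explicit cell: the result is cartesian again.
cell = 4.0 * np.eye(3)
cartesian = np.array([[5.0, -1.0, 2.0]])
print(wrap_positions(cartesian, cell=cell, pbc=[True, True, True]))
# expected: approximately [[1.0, 3.0, 2.0]]
```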
@@ -69,6 +69,10 @@ def __run_parallel(
cv.notify()
for upload in uploads:
logger.info(
'cli schedules parallel %s processing for upload' % label,
current_process=upload.current_process,
current_task=upload.current_task, upload_id=upload.upload_id)
with cv:
cv.wait_for(lambda: state['available_threads_count'] > 0)
state['available_threads_count'] -= 1
@@ -84,6 +88,10 @@ def __run_processing(
uploads, parallel: int, process, label: str, reprocess_running: bool = False):
def run_process(upload, logger):
logger.info(
'cli calls %s processing' % label,
current_process=upload.current_process,
current_task=upload.current_task, upload_id=upload.upload_id)
if upload.process_running and not reprocess_running:
logger.warn(
'cannot trigger %s, since the upload is already/still processing' % label,
@@ -99,7 +99,7 @@ def rabbitmq_url():
celery = NomadConfig(
max_memory=64e6, # 64 GB
timeout=1800, # 1/2 h
acks_late=True,
acks_late=False,
routing=CELERY_QUEUE_ROUTING,
priorities={
'Upload.process_upload': 5,
@@ -288,6 +288,7 @@ meta = NomadConfig(
version='0.10.0',
commit=gitinfo.commit,
release='devel',
deployment='standard',
default_domain='dft',
service='unknown nomad service',
name='novel materials discovery (NOMAD)',
@@ -310,6 +311,8 @@ raw_file_strip_cutoff = 1000
max_entry_download = 500000
use_empty_parsers = False
reprocess_unmatched = True
reprocess_rematch = True
process_reuse_parser = True
metadata_file_name = 'nomad'
metadata_file_extensions = ('json', 'yaml', 'yml')
@@ -415,6 +415,10 @@ class EntryMetadata(metainfo.MSection):
categories=[MongoMetadata],
a_search=Search())
processing_errors = metainfo.Quantity(
type=str, shape=['*'], description='Errors that occured during processing',
a_search=Search(many_and='append'))
nomad_version = metainfo.Quantity(
type=str,
description='The NOMAD version used for the last processing',
@@ -1523,7 +1523,6 @@ class Dos(MSection):
dos_values_lm = Quantity(
type=np.dtype(np.float64),
shape=['number_of_dos_lms', 'number_of_spin_channels', 'number_of_atoms', 'number_of_dos_values'],
unit='joule',
description='''
Array containing the density (electronic-energy) of states values projected on the
various spherical harmonics (integrated on all atoms), see
log, ref, version, commit = "44c7e69 Fixed performance and other isseus.", "heads/mui4", "v0.7.9-398-g44c7e69", "44c7e69"
@@ -54,6 +54,6 @@ normalizers: Iterable[Type[Normalizer]] = [
# FhiAimsBaseNormalizer,
DosNormalizer,
BandStructureNormalizer,
EncyclopediaNormalizer,
WorkflowNormalizer,
EncyclopediaNormalizer,
]