Commit e086fc91 authored by Markus Scheidgen

Merge branch 'v1.0.6' into 'master'

Merge for release v1.0.6

See merge request !601
parents 535d0c11 4a2379a2
Pipeline #127966 passed with stages in 44 minutes and 8 seconds
@@ -83,7 +83,7 @@ python tests:
   services:
     - name: rabbitmq:3.9.13
       alias: rabbitmq
-    - name: docker.elastic.co/elasticsearch/elasticsearch:6.8.15
+    - name: docker.elastic.co/elasticsearch/elasticsearch:7.17.1
       alias: elastic
     # fix issue with running elastic in gitlab ci runner:
     # https://gitlab.com/gitlab-org/gitlab-ce/issues/42214
...
[submodule "dependencies/parsers/mpes"]
path = dependencies/parsers/mpes
url = https://github.com/nomad-coe/nomad-parser-mpes.git
branch = master
[submodule "dependencies/parsers/aptfim"]
path = dependencies/parsers/aptfim
url = https://github.com/nomad-coe/nomad-parser-aptfim.git
branch = master
[submodule "dependencies/materia"] [submodule "dependencies/materia"]
path = gui/materia path = gui/materia
url = https://github.com/nomad-coe/materia url = https://github.com/nomad-coe/materia
...@@ -19,9 +11,6 @@ ...@@ -19,9 +11,6 @@
[submodule "dependencies/parsers/example"] [submodule "dependencies/parsers/example"]
path = dependencies/parsers/example path = dependencies/parsers/example
url = https://github.com/nomad-coe/nomad-parser-example.git url = https://github.com/nomad-coe/nomad-parser-example.git
[submodule "dependencies/parsers/xps"]
path = dependencies/parsers/xps
url = https://github.com/nomad-coe/nomad-parser-xps.git
[submodule "dependencies/parsers/electronic"] [submodule "dependencies/parsers/electronic"]
path = dependencies/parsers/electronic path = dependencies/parsers/electronic
url = https://github.com/nomad-coe/electronic-parsers.git url = https://github.com/nomad-coe/electronic-parsers.git
......
@@ -46,6 +46,9 @@ contributing, and API reference.
 Omitted versions are plain bugfix releases with only minor changes and fixes.
 
+### v1.0.6
+- upgraded to Elasticsearch 7.x
+
 ### v1.0.4
 - tabular data schema
...
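The Elasticsearch 7 upgrade named in this changelog entry is the main breaking change of the release; the code changes further down in this diff drop mapping types (`doc_type`) and read `hits.total` as an object instead of an integer. A minimal sketch of that second difference, illustrative only and not part of this merge request, assuming a local Elasticsearch 7 node and a hypothetical index name 'entries_v1':

# Illustrative sketch only (not part of this merge request). Assumes a local
# Elasticsearch 7 node and a hypothetical index name 'entries_v1'.
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search

client = Elasticsearch('http://localhost:9200')

# track_total_hits=True asks Elasticsearch 7 to count all matches exactly;
# without it the total is capped at 10000 and reported with relation 'gte'.
search = Search(using=client, index='entries_v1').extra(size=10, track_total_hits=True)
response = search.execute()

# In 6.x client responses hits.total was a plain integer; in 7.x it is an
# object carrying an exact or lower-bound count.
print(response.hits.total.value)      # number of matching documents
print(response.hits.total.relation)   # 'eq' (exact) or 'gte' (lower bound)

Requesting track_total_hits, as the search code in this diff now does, is what keeps the reported total exact beyond Elasticsearch 7's default cap of 10,000 hits.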
Subproject commit 51b0ccd3656355b1c33687d910ed9f982c9e297c
Subproject commit 09c9fbf47391814bbe95d76b7468f86eb8c361fb
Subproject commit 2d607abb021f7c19c5b1e2b1896c2b29ed68ea87
@@ -100,7 +100,7 @@ services:
     # the search engine
     elastic:
         restart: always
-        image: docker.elastic.co/elasticsearch/elasticsearch:6.8.15
+        image: docker.elastic.co/elasticsearch/elasticsearch:7.17.1
         container_name: nomad_oasis_elastic
         environment:
             - discovery.type=single-node
...
@@ -185,6 +185,7 @@ A few things to notice:
 - The NOMAD images we use are tagged `stable`. This could be replaced with concrete version tags.
 - The services are set up to restart `always`; you might want to change this to `no` while debugging errors to prevent
   indefinite restarts.
+- When the elasticsearch version is upgraded between NOMAD versions (e.g. NOMAD v1.0.6 introduced elasticsearch 7.x), it should be enough to recreate the elasticsearch container; docker will pull the new image. Elasticsearch 7.x can still read indices created in version 6.0 or later.
 
 ### nomad.yaml
...
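Following the note above about the Elasticsearch upgrade: once the elastic service has been recreated with the 7.17.1 image, a quick check that the node is up and still sees the indices created under 6.x can look like the following sketch. It is illustrative only; the host is assumed to be localhost:9200 and the index name is a placeholder, not NOMAD's actual index name.

# Illustrative sketch only. Assumes the upgraded node is reachable on
# localhost:9200; 'nomad_entries_v1' is a placeholder index name.
from elasticsearch import Elasticsearch

client = Elasticsearch('http://localhost:9200')

# The cluster should report 'green' (or 'yellow' on a single node with
# replicas configured) once the 7.x node has recovered the old indices.
print(client.cluster.health()['status'])

# Each index records the Elasticsearch version it was created with;
# 7.x can still read indices created with 6.0 or later.
settings = client.indices.get_settings(index='nomad_entries_v1')
for index_name, index_settings in settings.items():
    print(index_name, index_settings['settings']['index']['version']['created'])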
 {
   "name": "nomad-fair-gui",
-  "version": "1.0.5",
+  "version": "1.0.6",
   "commit": "e98694e",
   "private": true,
   "workspaces": [
...
@@ -11,7 +11,7 @@ window.nomadEnv = {
   'encyclopediaBase': 'https://nomad-lab.eu/prod/rae/encyclopedia/#',
   'debug': false,
   'version': {
-    'label': '1.0.5',
+    'label': '1.0.6',
     'isBeta': false,
     'isTest': true,
     'usesBetaData': true,
...
@@ -11,7 +11,7 @@ global.nomadEnv = {
   'appBase': 'http://nomad-lab.eu/prod/rae/beta',
   'debug': false,
   'version': {
-    'label': '1.0.5',
+    'label': '1.0.6',
     'isBeta': false,
     'isTest': true,
     'usesBetaData': true,
...
This diff is collapsed.
@@ -311,7 +311,7 @@ datacite = NomadConfig(
 )
 
 meta = NomadConfig(
-    version='1.0.5',
+    version='1.0.6',
     commit=gitinfo.commit,
     deployment='devel',
     label=None,
...
@@ -449,8 +449,6 @@ class Index():
         self.index_config_key = index_config_key
 
     def __elasticsearch_operation(self, name: str, *args, **kwargs):
-        if 'doc_type' not in kwargs:
-            kwargs['doc_type'] = self.doc_type.name
         if 'index' not in kwargs:
             kwargs['index'] = self.index_name
@@ -493,15 +491,12 @@
                         }
                     }
                 },
-                'mappings': {
-                    self.doc_type.name: self.doc_type.mapping
-                }
+                'mappings': self.doc_type.mapping
             })
             logger.info('elasticsearch index created')
         elif upsert:
             self.elastic_client.indices.put_mapping(
                 index=self.index_name,
-                doc_type=self.doc_type.name,
                 body=self.doc_type.mapping)
             logger.info('elasticsearch index updated')
         else:
...
@@ -25,11 +25,8 @@ from nomad.datamodel import results
 from .parser import MissingParser, BrokenParser, Parser, ArchiveParser, MatchingParserInterface
 from .artificial import EmptyParser, GenerateRandomParser, TemplateParser, ChaosParser
-from xpsparser import XPSParser
 from eelsdbparser import EELSDBParser
-# TODO
-# from mpesparser import MPESParser
-# from aptfimparser import APTFIMParser
 
 try:
     # these packages are not available without parsing extra, which is ok, if the
@@ -489,10 +486,7 @@ parsers = [
         name='parsers/phonopy', code_name='Phonopy', code_homepage='https://phonopy.github.io/phonopy/',
         mainfile_name_re=(r'(.*/phonopy-FHI-aims-displacement-0*1/control.in$)|(.*/phon.+yaml)')
     ),
-    # MPESParser(),
-    # APTFIMParser(),
     EELSDBParser(),
-    XPSParser(),
     ArchiveParser()
 ]
...
@@ -34,7 +34,7 @@ partially implemented.
 from typing import Union, List, Iterable, Any, cast, Dict, Iterator, Generator, Callable
 import json
 
-import elasticsearch
+import elasticsearch.helpers
 from elasticsearch.exceptions import TransportError, RequestError
 from elasticsearch_dsl import Q, A, Search
 from elasticsearch_dsl.query import Query as EsQuery
@@ -226,13 +226,13 @@ def update_metadata(
             yield dict(
                 doc=entry_doc,
                 _id=entry_metadata.entry_id,
-                _type=entry_index.doc_type.name,
                 _index=entry_index.index_name,
                 _op_type='update')
 
     updates = list(elastic_updates())
     _, failed = elasticsearch.helpers.bulk(
         infrastructure.elastic_client, updates, stats_only=True)
+    failed = cast(int, failed)
 
     if update_materials:
         # TODO update the matrials index at least for v1
@@ -894,9 +894,9 @@ def _es_to_api_aggregation(
         entries = None
         if 'entries' in es_bucket:
             if longest_nested_key:
-                entries = [{longest_nested_key: item['_source']} for item in es_bucket.entries.hits.hits]
+                entries = [{longest_nested_key: item['_source'].to_dict()} for item in es_bucket.entries.hits.hits]
             else:
-                entries = [item['_source'] for item in es_bucket.entries.hits.hits]
+                entries = [item['_source'].to_dict() for item in es_bucket.entries.hits.hits]
 
         # By default ES returns values of 0 and 1 for terms aggregation
         # targeting boolean values. Here we transform them into True/False
@@ -1047,7 +1047,7 @@ def search(
         if order_field != doc_type.id_field:
             sort[doc_type.id_field] = pagination.order.value
         search = search.sort(sort)
-    search = search.extra(size=pagination.page_size)
+    search = search.extra(size=pagination.page_size, track_total_hits=True)
 
     if pagination.page_offset:
         search = search.extra(**{'from': pagination.page_offset})
@@ -1124,7 +1124,7 @@ def search(
     # pagination
     next_page_after_value = None
-    if 0 < len(es_response.hits) < es_response.hits.total and len(es_response.hits) >= pagination.page_size:
+    if 0 < len(es_response.hits) < es_response.hits.total.value and len(es_response.hits) >= pagination.page_size:
         last = es_response.hits[-1]
         if order_field == doc_type.id_field:
             next_page_after_value = last[doc_type.id_field]
@@ -1135,7 +1135,7 @@ def search(
                 after_value = last.meta.sort[0]
                 next_page_after_value = '%s:%s' % (after_value, last[doc_type.id_field])
 
     pagination_response = PaginationResponse(
-        total=es_response.hits.total,
+        total=es_response.hits.total.value,
         next_page_after_value=next_page_after_value,
         **pagination.dict())
...
@@ -45,7 +45,7 @@ services:
     # the search engine
     elastic:
         restart: 'no'
-        image: docker.elastic.co/elasticsearch/elasticsearch:6.8.15
+        image: docker.elastic.co/elasticsearch/elasticsearch:7.17.1
         container_name: nomad_elastic
         environment:
             - cluster.routing.allocation.disk.threshold_enabled=true
...
@@ -36,7 +36,7 @@ services:
     # the search engine
     elastic:
         restart: always
-        image: docker.elastic.co/elasticsearch/elasticsearch:6.8.15
+        image: docker.elastic.co/elasticsearch/elasticsearch:7.17.1
         container_name: nomad_oasis_elastic
         environment:
             - discovery.type=single-node
...
 apiVersion: v1
-appVersion: "1.0.5"
+appVersion: "1.0.6"
 description: A Helm chart for Kubernetes that only runs nomad services and uses externally hosted databases.
 name: nomad
-version: 1.0.5
+version: "1.0.6"
@@ -16,7 +16,7 @@ worker:
     routing: "queue"
 
 elastic:
-    host: elasticsearch.elasticsearch.svc.cluster.local
+    host: elasticsearch.elasticsearch-7.svc.cluster.local
 
 mongo:
     host: rs0/mongodb-0.mongo.mongodb.svc.cluster.local,mongodb-1.mongo.mongodb.svc.cluster.local,mongodb-2.mongo.mongodb.svc.cluster.local
...
 version:
-    label: "v1.0.3"
+    label: "1.0.6"
     isBeta: true
     usesBetaData: false
     officialUrl: "https://nomad-lab.eu/prod/rae/gui"
@@ -36,7 +36,7 @@ worker:
     nomadNodeType: "prod-worker"
 
 elastic:
-    host: elasticsearch.elasticsearch.svc.cluster.local
+    host: elasticsearch.elasticsearch-7.svc.cluster.local
     port: 9200
 
 mongo:
...