Commit 56e041c3 authored by Markus Scheidgen

Merge branch 'v0.6.1' into 'master'

Release v0.6.1

See merge request !62
parents 370913c4 fff2396c
Pipeline #62124 passed with stage in 22 seconds
-Subproject commit dc59aff14e69a6818fbefeb7b1d504348438e26b
+Subproject commit 7d850bd76e878b1429d6974f5daac456099f6e4f
@@ -201,8 +201,14 @@ class NavigationUnstyled extends React.Component {
   }

   componentDidMount() {
-    fetch(`${guiBase}/meta.json`)
-      .then((response) => response.json())
+    fetch(`${guiBase}/meta.json`, {
+      method: 'GET',
+      cache: 'no-cache',
+      headers: {
+        'Pragma': 'no-cache',
+        'Cache-Control': 'no-cache, no-store'
+      }
+    }).then((response) => response.json())
       .then((meta) => {
         if (meta.version !== packageJson.version) {
           console.log('GUI API version mismatch')
...
@@ -280,7 +280,12 @@ class LegacyMetainfoEnvironment:
         return env

-    def generate_metainfo_code(self, package: Package, directory: str):
+    def generate_metainfo_code(
+            self, package: Package, directory: str = None, package_name: str = None):
+
+        if directory is None:
+            directory = '.'

         def format_description(description, indent=0, width=90):
             paragraphs = [paragraph.strip() for paragraph in description.split('\n')]
@@ -315,8 +320,14 @@ class LegacyMetainfoEnvironment:
             format_type=format_type,
             format_unit=format_unit)

-        with open(os.path.join(directory, '%s.py' % package.name), 'wt') as f:
-            f.write(env.get_template('package.j2').render(pkg=package))
+        with open(os.path.join(
+                directory, '%s.py' % package_name
+                if package_name is not None else package.name), 'wt') as f:
+            code = env.get_template('package.j2').render(pkg=package)
+            code = '\n'.join([
+                line.rstrip() if line.strip() != '' else ''
+                for line in code.split('\n')])
+            f.write(code)


 if __name__ == '__main__':
@@ -326,3 +337,4 @@ if __name__ == '__main__':
         package_names=['%s.nomadmetainfo.json' % pkg for pkg in ['common', 'public', 'vasp']])
     legacy_env = env.legacy_info_env()
+    env.generate_metainfo_code(env.env.all_packages['public.nomadmetainfo.json'], package_name='public')
@@ -1938,6 +1938,7 @@ class Environment(MSection):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.all_definitions_by_name: Dict[str, List[Definition]] = dict()
+        self.all_packages: Dict[str, Package] = dict()

     def resolve_definitions(  # type: ignore
             self, name: str, cls: Type[MSectionBound] = Definition) -> List[MSectionBound]:
@@ -1961,6 +1962,7 @@ class Environment(MSection):
     def on_add_sub_section(self, sub_section_def: SubSection, sub_section: MSection):
         if sub_section_def == Environment.packages:
             package = sub_section.m_as(Package)
+            self.all_packages[package.name] = package
             for definition in package.m_all_contents():
                 if isinstance(definition, Definition):
                     definitions = self.all_definitions_by_name.setdefault(definition.name, [])
...
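The new all_packages registry is what enables the package lookup used in the __main__ block of the metainfo code generator above: every package added to the environment is indexed by its name for direct access. A minimal standalone sketch of that registry pattern (the Package and Environment stand-ins below are simplified placeholders, not the real metainfo classes):

from typing import Dict

class Package:
    def __init__(self, name: str):
        self.name = name

class Environment:
    def __init__(self):
        # analogous to the new all_packages attribute: packages indexed by name
        self.all_packages: Dict[str, Package] = dict()

    def add_package(self, package: Package):
        # mirrors on_add_sub_section: register the package under its name
        self.all_packages[package.name] = package

env = Environment()
env.add_package(Package('public.nomadmetainfo.json'))
print(env.all_packages['public.nomadmetainfo.json'].name)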
@@ -71,10 +71,11 @@ based on NOMAD-coe's *python-common* module.
     :members:
 """

-from typing import Callable, IO, Union
+from typing import Callable, IO, Union, Dict
 import magic
 import gzip
 import bz2
+import lzma
 import os.path

 from nomad import files, config
@@ -87,7 +88,8 @@ from nomad.parsing.artificial import TemplateParser, GenerateRandomParser, Chaos

 _compressions = {
     b'\x1f\x8b\x08': ('gz', gzip.open),
-    b'\x42\x5a\x68': ('bz2', bz2.open)
+    b'\x42\x5a\x68': ('bz2', bz2.open),
+    b'\xfd\x37\x7a': ('xz', lzma.open)
 }
@@ -116,7 +118,7 @@ def match_parser(mainfile: str, upload_files: Union[str, files.StagingUploadFile
     with open(mainfile_path, 'rb') as f:
         compression, open_compressed = _compressions.get(f.read(3), (None, open))

-    with open_compressed(mainfile_path, 'rb') as cf:
+    with open_compressed(mainfile_path, 'rb') as cf:  # type: ignore
         buffer = cf.read(config.parser_matching_size)

     mime_type = magic.from_buffer(buffer, mime=True)
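The xz support added here works exactly like the existing gz and bz2 handling: the first three bytes of the mainfile select the matching opener, falling back to plain open. A small self-contained sketch of that lookup (the helper name open_maybe_compressed is made up for illustration and is not part of the change):

import bz2
import gzip
import lzma

# same magic-byte prefixes as in _compressions above
_compressions = {
    b'\x1f\x8b\x08': ('gz', gzip.open),
    b'\x42\x5a\x68': ('bz2', bz2.open),
    b'\xfd\x37\x7a': ('xz', lzma.open)
}

def open_maybe_compressed(path):
    # detect the compression from the first three bytes and return
    # (compression, binary file object opened with the matching opener)
    with open(path, 'rb') as f:
        compression, opener = _compressions.get(f.read(3), (None, open))
    return compression, opener(path, 'rb')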
@@ -147,14 +149,14 @@ parsers = [
     LegacyParser(
         name='parsers/vasp', code_name='VASP',
         parser_class_name='vaspparser.VASPRunParserInterface',
-        mainfile_mime_re=r'(application/xml)|(text/.*)',
+        mainfile_mime_re=r'(application/.*)|(text/.*)',
         mainfile_contents_re=(
             r'^\s*<\?xml version="1\.0" encoding="ISO-8859-1"\?>\s*'
             r'?\s*<modeling>'
             r'?\s*<generator>'
             r'?\s*<i name="program" type="string">\s*vasp\s*</i>'
             r'?'),
-        supported_compressions=['gz', 'bz2']
+        supported_compressions=['gz', 'bz2', 'xz']
     ),
     VaspOutcarParser(
         name='parsers/vasp-outcar', code_name='VASP',
...
@@ -15,7 +15,7 @@ worker:
   replicas: 1
   routing: "queue"
   processes: 10
-  nomadtype: "prod-worker"
+  nomadNodeType: "prod-worker"

 dbname: fairdi_nomad_prod
...
@@ -15,7 +15,7 @@ worker:
   replicas: 1
   routing: "queue"
   processes: 10
-  nomadtype: "prod-worker"
+  nomadNodeType: "prod-worker"

 dbname: fairdi_nomad_prod
...
@@ -39,6 +39,16 @@ data:
           rewrite ^{{ .Values.proxy.external.path }}/gui/service-worker.js /nomad/service-worker.js break;
       }

+      location {{ .Values.proxy.external.path }}/gui/meta.json {
+          add_header Last-Modified $date_gmt;
+          add_header Cache-Control 'no-store, no-cache, must-revalidate, proxy-revalidate, max-age=0';
+          if_modified_since off;
+          expires off;
+          etag off;
+          root /app/;
+          rewrite ^{{ .Values.proxy.external.path }}/gui/meta.json /nomad/meta.json break;
+      }
+
       location {{ .Values.proxy.external.path }}/api/uploads {
           client_max_body_size 35g;
           proxy_request_buffering off;
...
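Together with the no-cache fetch in the GUI above, this location block makes sure meta.json is never served from a stale cache, so version mismatches between GUI and API are detected right away. A quick sanity check one could run against a deployment (the URL is a placeholder and the requests package is assumed to be installed):

import requests

# placeholder URL; substitute the deployment's actual proxy path
url = 'https://nomad.example.org/nomad/gui/meta.json'

resp = requests.get(url)
# the location block above disables caching for this file
assert 'no-cache' in resp.headers.get('Cache-Control', '')
print('served GUI version:', resp.json().get('version'))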