Commit b860b6a8 authored by Markus Scheidgen

Merge branch 'docs' into 'master'

Docs

See merge request !12
parents 5cfe215a ebe7b0e0
Pipeline #37441 passed with stages in 4 minutes and 34 seconds
@@ -21,8 +21,8 @@ sys.path.insert(0, os.path.abspath('..'))
 # -- Project information -----------------------------------------------------
 
 project = 'nomad-FAIR'
-copyright = '2018, the NOMAD developers'
-author = 'the NOMAD developers'
+copyright = '2018, FAIRDI e.V.'
+author = 'FAIRDI e.V.'
 
 # The short X.Y version
 version = ''
......
 import React, { Component } from 'react'
 import PropTypes from 'prop-types'
 import { withStyles } from '@material-ui/core';
+import { apiBase } from '../config';
 
 class Documentation extends Component {
@@ -26,7 +27,7 @@ class Documentation extends Component {
       <div className={classes.content}>
         <iframe
           frameBorder={0} width="768" height={window.innerHeight - 64}
-          src="http://localhost:8000/nomad/api/docs/index.html"
+          src={`${apiBase}/docs/index.html`}
         />
       </div>
     </div>
@@ -32,7 +32,7 @@ base_path = config.services.api_base_path
 app = Flask(
     __name__,
     static_url_path='%s/docs' % base_path,
-    static_folder='../docs/.build/html')
+    static_folder=os.path.abspath(os.path.join(os.path.dirname(__file__), '../docs/.build/html')))
 CORS(app)
 
 app.config['SECRET_KEY'] = config.services.api_secret
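A relative static_folder is resolved against the Flask application's root path, which depends on how the package is imported or installed; deriving an absolute path from __file__, as the change above does, makes the served directory explicit and independent of the working directory. A minimal standalone sketch of the same pattern (paths and names here are illustrative, not nomad's):

import os
from flask import Flask

# resolve the docs build directory relative to this file, not the
# current working directory or the app's computed root path
docs_dir = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '../docs/.build/html'))

app = Flask(__name__, static_url_path='/docs', static_folder=docs_dir)

if __name__ == '__main__':
    app.run()  # serves e.g. http://localhost:5000/docs/index.html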
......
@@ -88,7 +88,11 @@ class File:
         self.object_id = object_id
         self.ext = ext
 
-        self.logger = utils.get_logger(__name__, bucket=bucket, object=object_id)
+        self.logger = self.bind_logger(utils.get_logger(__name__))
+
+    def bind_logger(self, logger):
+        """ Adds context information to the given logger and returns it. """
+        return logger.bind(bucket=self.bucket, object=self.object_id)
 
     def open(self, *args, **kwargs) -> IO:
         """ Opens the object with the given mode, etc. """
@@ -155,6 +159,9 @@ class UploadFile(File):
         self.filelist: List[str] = None
         self._local_path = local_path
 
+    def bind_logger(self, logger):
+        return super().bind_logger(logger).bind(upload_id=self.object_id)
+
     # There is no good way to encapsulate decorators in a class:
     # https://medium.com/@vadimpushtaev/decorator-inside-python-class-1e74d23107f6
     class Decorators:
@@ -182,7 +189,7 @@ class UploadFile(File):
     @Decorators.handle_errors
     def extract(self) -> None:
         """
-        'Opens' the upload. This means the uploaed files get extracted to tmp.
+        'Opens' the upload. This means the upload files get extracted to tmp.
 
         Raises:
             UploadFileError: If some IO went wrong.
@@ -203,6 +210,8 @@ class UploadFile(File):
         if zipFile is not None:
             zipFile.close()
 
+        self.logger.debug('extracted uploaded file')
+
     @Decorators.handle_errors
     def remove_extract(self) -> None:
         """
@@ -217,6 +226,8 @@ class UploadFile(File):
         except FileNotFoundError:
             raise KeyError()
 
+        self.logger.debug('removed uploaded file extract')
+
     def __enter__(self):
         self.extract()
         return self
@@ -261,9 +272,14 @@ class ArchiveFile(File):
             object_id=archive_id,
             ext='json.gz' if config.files.compress_archive else 'json')
 
+    def bind_logger(self, logger):
+        upload_hash, calc_hash = self.object_id.split('/')
+        return super().bind_logger(logger).bind(
+            archive_id=self.object_id, upload_hash=upload_hash, calc_hash=calc_hash)
+
     @contextmanager
     def write_archive_json(self) -> Generator[TextIO, None, None]:
-        """ Context manager that yiels a file-like to write the archive json. """
+        """ Context manager that yields a file-like to write the archive json. """
         if config.files.compress_archive:
             binary_out = self.open('wb')
             gzip_wrapper = cast(TextIO, gzip.open(binary_out, 'wt'))
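Writing the archive through gzip.open(..., 'wt') layers a text encoder on top of the compressed binary stream, so json can be dumped to it directly. A self-contained sketch of the same technique, using an in-memory buffer in place of the object store:

import gzip
import io
import json

binary_out = io.BytesIO()
# 'wt' adds a text layer over the gzip-compressed binary stream
with gzip.open(binary_out, 'wt') as text_out:
    json.dump({'section_run': []}, text_out)

# decompressing yields the plain json text again
assert gzip.decompress(binary_out.getvalue()) == b'{"section_run": []}'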
@@ -280,9 +296,11 @@ class ArchiveFile(File):
             out.close()
             binary_out.close()
 
+        self.logger.debug('archive file written')
+
     @contextmanager
     def read_archive_json(self) -> Generator[TextIO, None, None]:
-        """ Context manager that yiels a file-like to read the archive json. """
+        """ Context manager that yields a file-like to read the archive json. """
         try:
             if config.files.compress_archive:
                 binary_in = self.open(mode='rb')
@@ -301,12 +319,17 @@ class ArchiveFile(File):
             in_file.close()
             binary_in.close()
 
+        self.logger.debug('archive file read')
+
     @staticmethod
     def delete_archives(upload_hash: str):
         """ Delete all archives of one upload with the given hash. """
         bucket = config.files.archive_bucket
         Objects.delete_all(bucket, upload_hash)
+
+        utils.get_logger(__name__, bucket=bucket, upload_hash=upload_hash) \
+            .debug('archive files deleted')
 
 
 class ArchiveLogFile(File):
     """
@@ -132,7 +132,7 @@ class Proc(Document, metaclass=ProcMetaclass):
     def create(cls, **kwargs):
         """ Factory method that must be used instead of regular constructor. """
         assert cls.tasks is not None and len(cls.tasks) > 0, \
-            """ the class attribute tasks must be overwritten with an acutal list """
+            """ the class attribute tasks must be overwritten with an actual list """
         assert 'status' not in kwargs, \
             """ do not set the status manually, it's managed """
@@ -245,8 +245,8 @@ class Proc(Document, metaclass=ProcMetaclass):
     def block_until_complete(self, interval=0.01):
         """
-        Reloads the process constrantly until it sees a completed process. Should be
-        used with care as it can block indefinetly. Just intended for testing purposes.
+        Reloads the process constantly until it sees a completed process. Should be
+        used with care as it can block indefinitely. Just intended for testing purposes.
         """
         while not self.completed:
             time.sleep(interval)
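The docstring's warning is concrete: the loop polls with no deadline. A hedged sketch of a timeout-bounded variant (reload and completed are taken from the surrounding code; the timeout handling is an assumption, not part of this commit):

import time

def block_until_complete_with_timeout(proc, interval=0.01, timeout=60.0):
    # same polling loop as above, but bounded by a deadline
    deadline = time.monotonic() + timeout
    while not proc.completed:
        if time.monotonic() > deadline:
            raise TimeoutError('process did not complete in time')
        time.sleep(interval)
        proc.reload()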
@@ -275,13 +275,13 @@ class InvalidChordUsage(Exception): pass
 class Chord(Proc):
     """
     A special Proc base class that manages a chord of child processes. It saves some
-    attional state to track child processes and provides methods to control that
+    additional state to track child processes and provides methods to control that
     state.
 
-    It uses a counter approach with atomic updates to trac the number of processed
+    It uses a counter approach with atomic updates to track the number of processed
     children.
 
-    TODO the joined attribute is not stricly necessary and only serves debugging purposes.
+    TODO the joined attribute is not strictly necessary and only serves debugging purposes.
     Maybe it should be removed, since it also requires another save.
 
     TODO it is vital that sub classes and children don't miss any calls. This might
@@ -313,7 +313,7 @@ class Chord(Proc):
         self._check_join(children=0)
 
     def completed_child(self):
-        """ Children must call this, when they completed processig. """
+        """ Children must call this, when they completed processing. """
         self._check_join(children=1)
 
     def _check_join(self, children):
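The counter approach described in the Chord docstring typically comes down to an atomic increment plus a check-and-set on the join flag. A minimal sketch of that pattern with mongoengine (field and method names here are assumptions, not the real Chord schema; connect() and document creation are omitted):

from mongoengine import Document, IntField, BooleanField

class ChordSketch(Document):
    total_children = IntField(default=0)      # assumed field names
    completed_children = IntField(default=0)
    joined = BooleanField(default=False)

    def completed_child(self):
        # modify() issues an atomic findAndModify, so concurrent
        # children cannot lose increments of the counter
        self.modify(inc__completed_children=1)
        if self.completed_children >= self.total_children:
            # the query part makes flipping `joined` a check-and-set,
            # so the join logic runs exactly once
            if self.modify(query={'joined': False}, set__joined=True):
                self.join()

    def join(self):
        """ Hypothetical hook for follow-up work after all children. """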
@@ -374,12 +374,12 @@ class Chord(Proc):
 def task(func):
     """
-    The decorator for tasks that will be wrapped in excaption handling that will fail the process.
+    The decorator for tasks that will be wrapped in exception handling that will fail the process.
 
     The task methods of a :class:`Proc` class/document comprise a sequence
     (order of methods in class namespace) of tasks. Tasks must be executed in that order.
     Completion of the last task, will put the :class:`Proc` instance into the
     SUCCESS state. Calling the first task will put it into RUNNING state. Tasks will
-    only be exectued, if the process has not yet reached FAILURE state.
+    only be executed, if the process has not yet reached FAILURE state.
     """
     def wrapper(self, *args, **kwargs):
         if self.status == 'FAILURE':
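The FAILURE short-circuit visible at the end of this hunk is the core of the decorator. A self-contained toy version, just to show the wrapper semantics (MiniProc and its fail hook are invented stand-ins for Proc):

class MiniProc:
    """ Toy stand-in for Proc, just to show the wrapper semantics. """
    def __init__(self):
        self.status = 'CREATED'

    def fail(self, error):
        self.status = 'FAILURE'
        self.error = error

def task(func):
    # simplified wrapper: skip tasks once the process failed,
    # and turn exceptions into a FAILURE state
    def wrapper(self, *args, **kwargs):
        if self.status == 'FAILURE':
            return
        try:
            return func(self, *args, **kwargs)
        except Exception as e:
            self.fail(e)
    return wrapper

class Example(MiniProc):
    @task
    def parse(self):
        raise ValueError('bad input')

    @task
    def normalize(self):
        print('never reached after a failure')

p = Example()
p.parse()        # the exception is caught and p moves to FAILURE
p.normalize()    # skipped, because status is already FAILURE
print(p.status)  # FAILURE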
@@ -416,9 +416,9 @@ def proc_task(task, cls_name, self_id, func_attr):
     It ignores results, since all results are handled via the self document.
 
     It retries for 3 times with a countdown of 3 on missing 'selfs', since this
     might happen in sharded, distributed mongo setups where the object might not
-    have yet been propagated and therefore apear missing.
+    have yet been propagated and therefore appear missing.
     """
-    logger = utils.get_logger('__name__', cls=cls_name, id=self_id, func=func_attr)
+    logger = utils.get_logger(__name__, cls=cls_name, id=self_id, func=func_attr)
 
     # get the process class
     logger.debug('received process function call')
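The retry-on-missing-self behavior described in the docstring maps onto celery's bound-task retry API. A hedged sketch of the pattern (the in-memory DOCUMENTS dict stands in for the mongo lookup; broker configuration is omitted):

from celery import Celery

app = Celery('proc_sketch')  # broker/backend config omitted

DOCUMENTS = {}  # toy stand-in for the mongo collection

@app.task(bind=True, max_retries=3)
def proc_task_sketch(self, cls_name, self_id, func_attr):
    doc = DOCUMENTS.get((cls_name, self_id))
    if doc is None:
        # in a sharded, distributed mongo setup the document may not
        # have propagated yet, so retry instead of failing outright
        raise self.retry(countdown=3)
    # results are ignored; all state lives on the document itself
    getattr(doc, func_attr)()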