Commit 4d5381b0 authored by Markus Scheidgen

Merge branch 'logging' into 'master'

Adding some log entries with processing timing (poor man's APM).

See merge request !9
parents d476cb66 33632071
Pipeline #37376 passed with stages in 5 minutes and 1 second
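The change below wraps each processing step in a new utils.timer context manager that logs how long the step took. What follows is a minimal, self-contained sketch of that pattern, not NOMAD's actual implementation: it uses only the standard library, with a stdlib logger standing in for NOMAD's structlog-based logger (which, as the diff shows, receives the duration as a structured exec_time key rather than having it formatted into the message).

    import logging
    import time
    from contextlib import contextmanager

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger('demo')

    @contextmanager
    def timer(logger, event, method='info'):
        # Measure the wall-clock time of the wrapped block and emit a
        # single log entry once the block finishes, even on error.
        start = time.time()
        try:
            yield
        finally:
            stop = time.time()
            log = getattr(logger, method, None)
            if log is not None:
                # A stdlib logger takes no arbitrary keyword arguments, so
                # the duration is formatted into the message here; the real
                # nomad.utils.timer passes exec_time=stop - start instead.
                log('%s (exec_time=%.3fs)', event, stop - start)
            else:
                logger.error('Unknown logger method %s.', method)

    with timer(logger, 'parser executed'):
        time.sleep(0.1)  # stands in for parser.run(...)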
@@ -44,7 +44,7 @@
             "cwd": "${workspaceFolder}",
             "program": "${workspaceFolder}/.pyenv/bin/pytest",
             "args": [
-                "-sv", "tests/test_parsing.py::test_parser[parsers/vasp-tests/data/parsers/vasp.xml]"
+                "-sv", "tests/test_utils::test_timer"
             ]
         },
         {
@@ -176,9 +176,10 @@ class Calc(Proc):
     @task
     def parsing(self):
-        logger = self.get_calc_logger()
+        logger = self.get_calc_logger(parser=self.parser)
         parser = parser_dict[self.parser]
-        self._parser_backend = parser.run(self.mainfile_tmp_path, logger=logger)
+        with utils.timer(logger, 'parser executed'):
+            self._parser_backend = parser.run(self.mainfile_tmp_path, logger=logger)
         if self._parser_backend.status[0] != 'ParseSuccess':
             logger.error(self._parser_backend.status[1])
             error = self._parser_backend.status[1]
@@ -186,10 +187,11 @@ class Calc(Proc):
     @task
     def normalizing(self):
-        logger = self.get_calc_logger()
         for normalizer in normalizers:
             normalizer_name = normalizer.__name__
-            normalizer(self._parser_backend).normalize(logger=logger)
+            logger = self.get_calc_logger(normalizer=normalizer_name)
+            with utils.timer(logger, 'normalizer executed'):
+                normalizer(self._parser_backend).normalize(logger=logger)
         if self._parser_backend.status[0] != 'ParseSuccess':
             logger.error(self._parser_backend.status[1])
             error = self._parser_backend.status[1]
@@ -200,29 +202,30 @@ class Calc(Proc):
     @task
     def archiving(self):
-        upload_hash, calc_hash = self.archive_id.split('/')
-        additional = dict(
-            mainfile=self.mainfile,
-            upload_time=self._upload.upload_time,
-            staging=True,
-            restricted=False,
-            user_id=self._upload.user_id)
-
-        # persist to elastic search
-        RepoCalc.create_from_backend(
-            self._parser_backend,
-            additional=additional,
-            upload_hash=upload_hash,
-            calc_hash=calc_hash,
-            upload_id=self.upload_id)
-
-        # persist the archive
-        with ArchiveFile(self.archive_id).write_archive_json() as out:
-            self._parser_backend.write_json(out, pretty=True)
-
-        # close loghandler
-        if self._calc_proc_logwriter is not None:
-            self._calc_proc_logwriter.close()
-            self._calc_proc_logwriter = None
+        with utils.timer(self.get_logger(), 'archived'):
+            upload_hash, calc_hash = self.archive_id.split('/')
+            additional = dict(
+                mainfile=self.mainfile,
+                upload_time=self._upload.upload_time,
+                staging=True,
+                restricted=False,
+                user_id=self._upload.user_id)
+
+            # persist to elastic search
+            RepoCalc.create_from_backend(
+                self._parser_backend,
+                additional=additional,
+                upload_hash=upload_hash,
+                calc_hash=calc_hash,
+                upload_id=self.upload_id)
+
+            # persist the archive
+            with ArchiveFile(self.archive_id).write_archive_json() as out:
+                self._parser_backend.write_json(out, pretty=True)
+
+            # close loghandler
+            if self._calc_proc_logwriter is not None:
+                self._calc_proc_logwriter.close()
+                self._calc_proc_logwriter = None

 class Upload(Chord):
@@ -391,9 +394,9 @@ class Upload(Chord):
     def extracting(self):
         logger = self.get_logger()
         try:
-            self._upload = UploadFile(self.upload_id, local_path=self.local_path)
-            self._upload.extract()
-            logger.debug('opened upload')
+            with utils.timer(logger, 'upload extracted'):
+                self._upload = UploadFile(self.upload_id, local_path=self.local_path)
+                self._upload.extract()
         except KeyError as e:
             self.fail('process request for non existing upload', level=logging.INFO)
             return
@@ -439,7 +442,8 @@ class Upload(Chord):
     @task
     def cleanup(self):
         try:
-            upload = UploadFile(self.upload_id, local_path=self.local_path)
+            with utils.timer(self.get_logger(), 'processing cleaned up'):
+                upload = UploadFile(self.upload_id, local_path=self.local_path)
         except KeyError as e:
             self.fail('Upload does not exist', exc_info=e)
             return
@@ -43,6 +43,7 @@ import json
 import os
 import sys
 import uuid
+import time

 from nomad import config
@@ -119,7 +120,7 @@ if not _logging_is_configured:
     logging.basicConfig(stream=sys.stdout)
     root = logging.getLogger()
     for handler in root.handlers:
-        handler.setLevel(config.console_log_level if 'pytest' not in sys.modules else logging.DEBUG)
+        handler.setLevel(config.console_log_level if 'pytest' not in sys.modules else logging.CRITICAL)

     # configure logstash
     if config.logstash.enabled and 'pytest' not in sys.modules:
@@ -173,3 +174,19 @@ def lnr(logger, event, **kwargs):
     except Exception as e:
         logger.error(event, exc_info=e, **kwargs)
         raise e
+
+
+@contextmanager
+def timer(logger, event, method='info'):
+    start = time.time()
+    try:
+        yield
+    finally:
+        stop = time.time()
+        logger_method = getattr(logger, method, None)
+        if logger_method is not None:
+            logger_method(event, exec_time=stop - start)
+        else:
+            logger.error('Unknown logger method %s.' % method)
# Copyright 2018 Markus Scheidgen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import logging
# Copyright 2018 Markus Scheidgen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import json

from nomad import utils


def test_timer(caplog):
    with utils.timer(utils.get_logger('test_logger'), 'test measure'):
        time.sleep(0.1)

    assert json.loads(caplog.record_tuples[0][2])['event'] == 'test measure'