Commit 531de273 authored by Markus Scheidgen

Improved the archive log system.

parent f9e8c8cc
Pipeline #68610 passed with stages in 14 minutes and 19 seconds
@@ -4,7 +4,7 @@ import { withErrors } from './errors'
 import { UploadRequest } from '@navjobs/upload'
 import Swagger from 'swagger-client'
 import { apiBase } from '../config'
-import { Typography, withStyles, Link } from '@material-ui/core'
+import { Typography, withStyles } from '@material-ui/core'
 import LoginLogout from './LoginLogout'
 import { compose } from 'recompose'
 import MetaInfoRepository from './MetaInfoRepository'
...
 import React from 'react'
 import PropTypes from 'prop-types'
-import { withStyles, Fab, Typography } from '@material-ui/core'
+import { withStyles, Fab, Typography, ExpansionPanel, ExpansionPanelSummary, ExpansionPanelDetails } from '@material-ui/core'
 import { compose } from 'recompose'
 import { withApi } from '../api'
 import Download from './Download'
 import DownloadIcon from '@material-ui/icons/CloudDownload'
+import ExpandMoreIcon from '@material-ui/icons/ExpandMore'
+import ReactJson from 'react-json-view'
+import { amber } from '@material-ui/core/colors'
+import { maxLogsToShow } from '../../config'
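+// LogEntry renders one line of the archive log: lines that parse as JSON
+// become expandable panels showing level, event, and the raw log data;
+// anything else falls back to plain text.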
+class LogEntryUnstyled extends React.Component {
+  static propTypes = {
+    classes: PropTypes.object.isRequired,
+    entry: PropTypes.string.isRequired
+  }
+
+  static styles = theme => ({
+    warning: {
+      color: amber[700]
+    },
+    exception: {
+      margin: 0
+    }
+  })
+
+  render() {
+    const { classes, entry } = this.props
+
+    let data = undefined
+    try {
+      data = JSON.parse(entry)
+    } catch (e) {
+      return <ExpansionPanelSummary>
+        <Typography>{entry}</Typography>
+      </ExpansionPanelSummary>
+    }
+
+    const summaryProps = {}
+    if (data.level === 'ERROR' || data.level === 'CRITICAL') {
+      summaryProps.color = 'error'
+    } else if (data.level === 'WARNING') {
+      summaryProps.classes = {root: classes.warning}
+    }
+
+    return (
+      <ExpansionPanel>
+        <ExpansionPanelSummary expandIcon={<ExpandMoreIcon />}>
+          <Typography {...summaryProps}>{data.level}: {data.event} {(data.parser || data.normalizer) ? `(${data.parser || data.normalizer})` : ''}</Typography>
+        </ExpansionPanelSummary>
+        <ExpansionPanelDetails>
+          <ReactJson
+            src={data}
+            enableClipboard={false}
+            displayObjectSize={false} />
+        </ExpansionPanelDetails>
+        {data.exception && <ExpansionPanelDetails>
+          <pre className={classes.exception}>{data.exception}</pre>
+        </ExpansionPanelDetails>}
+      </ExpansionPanel>)
+  }
+}
+
+const LogEntry = withStyles(LogEntryUnstyled.styles)(LogEntryUnstyled)
 class ArchiveLogView extends React.Component {
   static propTypes = {
...
@@ -17,11 +72,9 @@ class ArchiveLogView extends React.Component {
   static styles = theme => ({
     root: {
-      '& pre': {
-        overflowX: 'auto'
-      }
+      marginTop: theme.spacing.unit * 2
     },
-    error: {
+    moreLogs: {
       marginTop: theme.spacing.unit * 2
     },
     downloadFab: {
@@ -72,20 +125,32 @@ class ArchiveLogView extends React.Component {
     if (doesNotExist) {
       return (
-        <Typography className={classes.error}>
+        <Typography>
           No archive log exists for this entry. Most likely the entry itself does not
           exist.
         </Typography>
       )
     }
+    let content = 'loading ...'
+    if (data) {
+      const lines = data.split('\n')
+      content = <div>
+        {lines.slice(0, maxLogsToShow).map((entry, i) => <LogEntry key={i} entry={entry}/>)}
+        {lines.length > maxLogsToShow && <Typography classes={{root: classes.moreLogs}}>
+          There are {lines.length - maxLogsToShow} more log entries. Download the log to see all of them.
+        </Typography>}
+      </div>
+    }
+
     return (
       <div className={classes.root}>
-        <pre>{data || 'loading ...'}</pre>
+        {content}
         <Download
           classes={{root: classes.downloadFab}} tooltip="download logfile"
           component={Fab} className={classes.downloadFab} size="medium"
           color="primary"
           url={`archive/logs/${uploadId}/${calcId}`} fileName={`${calcId}.log`}
         >
           <DownloadIcon />
...
@@ -13,6 +13,7 @@ export const keycloakClientId = window.nomadEnv.keycloakClientId
 export const debug = window.nomadEnv.debug || false
 export const sendTrackingData = window.nomadEnv.sendTrackingData
 export const email = 'webmaster@nomad-coe.eu'
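+// maximum number of log entries rendered inline in the archive log view;
+// the rest is only available via the log file download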
+export const maxLogsToShow = 50
 export const consent = `
 By using this web-site and by uploading and downloading data, you agree to the
...
@@ -33,6 +33,7 @@ import os.path
 from datetime import datetime
 from pymongo import UpdateOne
 import hashlib
+from structlog.processors import StackInfoRenderer, format_exc_info, TimeStamper, JSONRenderer
 from nomad import utils, config, infrastructure, search, datamodel
 from nomad.files import PathObject, UploadFiles, ExtractError, ArchiveBasedStagingUploadFiles, PublicUploadFiles, StagingUploadFiles
...
@@ -42,6 +43,29 @@ from nomad.normalizing import normalizers
 from nomad.datamodel import UploadWithMetadata, Domain
+def _pack_log_event(logger, method_name, event_dict):
+    try:
+        log_data = dict(event_dict)
+        log_data.update(**{
+            key: value
+            for key, value in getattr(logger, '_context', {}).items()
+            if key not in ['service', 'release', 'upload_id', 'calc_id', 'mainfile', 'process_status']})
+        log_data.update(level=method_name.upper(), logger=logger.name)
+        return log_data
+    except Exception:
+        # raising an exception would cause an indefinite loop
+        return event_dict
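+
+# processors applied to every entry written to a calc's archive log;
+# the JSONRenderer runs last, so each entry reaches the log writer as
+# a single JSON line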
+_log_processors = [
+    StackInfoRenderer(),
+    _pack_log_event,
+    format_exc_info,
+    TimeStamper(fmt="%Y-%m-%d %H:%M.%S", utc=False),
+    JSONRenderer(sort_keys=True)]
 class Calc(Proc):
     """
     Instances of this class represent calculations. This class manages the elastic
...
@@ -143,20 +167,17 @@ class Calc(Proc):
         else:
             def save_to_calc_log(logger, method_name, event_dict):
                 if self._calc_proc_logwriter is not None:
-                    program = event_dict.get('normalizer', 'parser')
-                    event = event_dict.get('event', '')
                     try:
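+                        # event_dict has already been rendered into a JSON
+                        # string by the preceding _log_processors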
+                        self._calc_proc_logwriter.write(event_dict)
+                        self._calc_proc_logwriter.write('\n')
-                        entry = '[%s] %s, %s: %s' % (method_name, datetime.utcnow().isoformat(), program, event)
-                        if len(entry) > 140:
-                            self._calc_proc_logwriter.write(entry[:140])
-                            self._calc_proc_logwriter.write('...')
-                        else:
-                            self._calc_proc_logwriter.write(entry)
-                        self._calc_proc_logwriter.write('\n')
                     except Exception:
                         # Exceptions here will cause indefinite loop
                         pass
                 return event_dict

-            return wrap_logger(logger, processors=[save_to_calc_log])
+            return wrap_logger(logger, processors=_log_processors + [save_to_calc_log])
     @process
     def re_process_calc(self):
...
@@ -285,14 +306,10 @@ class Calc(Proc):
             self._parser_backend = parser.run(
                 self.upload_files.raw_file_object(self.mainfile).os_path, logger=logger)
         except Exception as e:
-            self.fail(
-                'parser failed with exception', level=logging.ERROR,
-                exc_info=e, error=str(e), **context)
+            self.fail('parser failed with exception', exc_info=e, error=str(e), **context)
             return
         except SystemExit:
-            self.fail(
-                'parser raised system exit', level=logging.ERROR,
-                error='system exit', **context)
+            self.fail('parser raised system exit', error='system exit', **context)
             return

         # add the non code specific calc metadata to the backend
@@ -317,9 +334,8 @@ class Calc(Proc):
         self.add_processor_info(self.parser)
         if self._parser_backend.status[0] != 'ParseSuccess':
-            logger.error(self._parser_backend.status[1])
             error = self._parser_backend.status[1]
-            self.fail(error, level=logging.INFO, **context)
+            self.fail('parser failed', error=error, **context)
     @contextmanager
     def use_parser_backend(self, processor_name):
...
@@ -364,20 +380,18 @@ class Calc(Proc):
                 try:
                     normalizer(backend).normalize(logger=logger)
                 except Exception as e:
-                    self._parser_backend.finishedParsingSession('ParseFailure', [str(e)])
+                    self._parser_backend.status = ['ParseFailure', str(e)]
                     self.fail(
-                        'normalizer failed with exception', level=logging.ERROR,
-                        exc_info=e, error=str(e), **context)
-
-                failed = self._parser_backend.status[0] != 'ParseSuccess'
-                if failed:
-                    logger.error(self._parser_backend.status[1])
-                    error = self._parser_backend.status[1]
-                    self.fail(error, level=logging.WARNING, error=error, **context)
-                    break
-                else:
-                    logger.debug(
-                        'completed normalizer successfully', normalizer=normalizer_name)
+                        'normalizer failed with exception', exc_info=e, error=str(e), **context)
+                    break
+                else:
+                    if self._parser_backend.status[0] != 'ParseSuccess':
+                        error = self._parser_backend.status[1]
+                        self.fail('normalizer failed', error=error, **context)
+                        break
+                    else:
+                        logger.debug(
+                            'completed normalizer successfully', normalizer=normalizer_name)
     @task
     def archiving(self):
...
@@ -90,7 +90,13 @@ def assert_processing(upload: Upload, published: bool = False):
     assert 'section_entry_info' in archive
     with upload_files.archive_log_file(calc.calc_id, 'rt') as f:
-        assert 'a test' in f.read()
+        has_test_event = False
+        for line in f.readlines():
+            log_data = json.loads(line)
+            for key in ['event', 'calc_id', 'level']:
+                assert key in log_data
+            has_test_event = has_test_event or log_data['event'] == 'a test log entry'
+        assert has_test_event
     assert len(calc.errors) == 0
     with upload_files.raw_file(calc.mainfile) as f:
...
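For reference, a minimal, self-contained sketch (plain structlog, not part of the commit) of what such a processor chain produces: one JSON object per log call, which is the line format the new LogEntry component parses on the frontend. `PrintLogger` stands in for the calc's log writer and the `parser` value is illustrative; the commit's actual chain additionally runs its `_pack_log_event` processor to merge the bound context and the log level into each entry.

```python
from structlog import PrintLogger, wrap_logger
from structlog.processors import StackInfoRenderer, format_exc_info, TimeStamper, JSONRenderer

# the same processors as _log_processors above, minus _pack_log_event
processors = [
    StackInfoRenderer(),
    format_exc_info,
    TimeStamper(fmt="%Y-%m-%d %H:%M.%S", utc=False),
    JSONRenderer(sort_keys=True)]

# wrap_logger applies the processors in order; JSONRenderer runs last and
# renders each event dict as a single JSON string, i.e. one log line
logger = wrap_logger(PrintLogger(), processors=processors)
logger.warning('a test log entry', parser='parsers/vasp')
# prints, e.g.:
# {"event": "a test log entry", "parser": "parsers/vasp", "timestamp": "2019-10-25 17:04.00"}
```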