# Copyright 2018 Markus Scheidgen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
This module comprises a set of persistent document classes that hold all user related
data. These are information about users, their uploads and datasets, the associated
calculations, and files


.. autoclass:: Calc
Markus Scheidgen's avatar
Markus Scheidgen committed
22

Markus Scheidgen's avatar
Markus Scheidgen committed
23
.. autoclass:: Upload
Markus Scheidgen's avatar
Markus Scheidgen committed
24

Markus Scheidgen's avatar
Markus Scheidgen committed
25
26
"""

from typing import cast, List, Any, ContextManager, Tuple, Generator, Dict
from mongoengine import StringField, DateTimeField, DictField, BooleanField, IntField
import logging
from structlog import wrap_logger
from contextlib import contextmanager
import os.path
from datetime import datetime
from pymongo import UpdateOne
import hashlib
from structlog.processors import StackInfoRenderer, format_exc_info, TimeStamper
import json

from nomad import utils, config, infrastructure, search, datamodel
from nomad.files import PathObject, UploadFiles, ExtractError, ArchiveBasedStagingUploadFiles, PublicUploadFiles, StagingUploadFiles
from nomad.processing.base import Proc, process, task, PENDING, SUCCESS, FAILURE
from nomad.parsing import parser_dict, match_parser, LocalBackend
from nomad.normalizing import normalizers
from nomad.datamodel import UploadWithMetadata, Domain


def _pack_log_event(logger, method_name, event_dict):
    try:
        log_data = dict(event_dict)
        log_data.update(**{
            key: value
            for key, value in getattr(logger, '_context', {}).items()
            if key not in ['service', 'release', 'upload_id', 'calc_id', 'mainfile', 'process_status']})
        log_data.update(logger=logger.name)

        return log_data
    except Exception:
        # raising an exception would cause an indefinite loop
        return event_dict


_log_processors = [
    StackInfoRenderer(),
    _pack_log_event,
    format_exc_info,
    TimeStamper(fmt="%Y-%m-%d %H:%M.%S", utc=False)]
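
# A minimal usage sketch for this processor chain (hypothetical logger name;
# the actual wiring happens in Calc.get_logger below):
#
#     logger = wrap_logger(logging.getLogger('nomad.processing'), processors=_log_processors)
#     logger.info('parsing started', calc_id='<calc-id>')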


class Calc(Proc):
    """
    Instances of this class represent calculations. This class manages the elastic
    search index entry, files, and archive for the respective calculation.

    It also contains the calculation's processing and its state.

    The attribute list does not include the various metadata properties generated
    while parsing, including ``code_name``, ``code_version``, etc.

    Attributes:
        calc_id: the calc_id of this calc
        parser: the name of the parser used to process this calc
        upload_id: the id of the upload used to create this calculation
        mainfile: the mainfile (including path in upload) that was used to create this calc
        metadata: the metadata record with calc and user metadata, see :class:`datamodel.CalcWithMetadata`
    """
    calc_id = StringField(primary_key=True)
    upload_id = StringField()
    mainfile = StringField()
    parser = StringField()

    metadata = DictField()

    meta: Any = {
        'indexes': [
            'upload_id',
            ('upload_id', 'mainfile'),
            ('upload_id', 'parser'),
            ('upload_id', 'tasks_status'),
            ('upload_id', 'process_status'),
            ('upload_id', 'metadata.nomad_version'),
            'metadata.published',
            'metadata.datasets',
            'metadata.pid'
        ]
    }
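
    # These compound indexes back the most frequent queries; a hypothetical
    # example of a query they serve:
    #
    #     Calc.objects(upload_id='<upload-id>', tasks_status=SUCCESS)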

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._parser_backend: LocalBackend = None
        self._upload: Upload = None
        self._upload_files: ArchiveBasedStagingUploadFiles = None
        self._calc_proc_logwriter = None
        self._calc_proc_logwriter_ctx: ContextManager = None

    @classmethod
    def from_calc_with_metadata(cls, calc_with_metadata):
        calc = Calc.create(
            calc_id=calc_with_metadata.calc_id,
            upload_id=calc_with_metadata.upload_id,
            mainfile=calc_with_metadata.mainfile,
            metadata=calc_with_metadata.to_dict())

        return calc
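
    # A hedged usage sketch: given an existing datamodel.CalcWithMetadata
    # instance `cwm`, `Calc.from_calc_with_metadata(cwm)` creates the
    # corresponding mongo document.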

    @classmethod
    def get(cls, id):
        return cls.get_by_id(id, 'calc_id')

    @property
    def mainfile_file(self) -> PathObject:
        return self.upload_files.raw_file_object(self.mainfile)

    @property
    def upload(self) -> 'Upload':
        if not self._upload:
            self._upload = Upload.get(self.upload_id)
            self._upload.worker_hostname = self.worker_hostname
        return self._upload

    @property
    def upload_files(self) -> ArchiveBasedStagingUploadFiles:
        if not self._upload_files:
            self._upload_files = ArchiveBasedStagingUploadFiles(
                self.upload_id, is_authorized=lambda: True, upload_path=self.upload.upload_path)
        return self._upload_files

    def get_logger(self, **kwargs):
        """
        Returns a wrapped logger that additionally saves all entries to the calculation
        processing log in the archive.
        """
        logger = super().get_logger()
        logger = logger.bind(
            upload_id=self.upload_id, mainfile=self.mainfile, calc_id=self.calc_id, **kwargs)

        if self._calc_proc_logwriter_ctx is None:
            try:
                self._calc_proc_logwriter_ctx = self.upload_files.archive_log_file(self.calc_id, 'wt')
                self._calc_proc_logwriter = self._calc_proc_logwriter_ctx.__enter__()  # pylint: disable=E1101
            except KeyError:
                # cannot open log file
                pass

        if self._calc_proc_logwriter_ctx is None:
            return logger
        else:
            def save_to_calc_log(logger, method_name, event_dict):
                if self._calc_proc_logwriter is not None:
                    try:
                        dump_dict = dict(event_dict)
                        dump_dict.update(level=method_name.upper())
                        json.dump(dump_dict, self._calc_proc_logwriter, sort_keys=True)
                        self._calc_proc_logwriter.write('\n')
                    except Exception:
                        # exceptions here would cause an indefinite loop
                        pass

                return event_dict

            return wrap_logger(logger, processors=_log_processors + [save_to_calc_log])
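
    # Entries logged through the wrapped logger above end up in the calc's
    # archive log as one JSON object per line, roughly of this shape
    # (hypothetical values):
    #
    #     {"event": "parser executed", "level": "INFO", "logger": "nomad.processing", ...}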

    @process
    def re_process_calc(self):
        """
        Processes a calculation again. This means that metadata already exists and,
        instead of creating it from scratch, we only update the existing records.
        """
        parser = match_parser(self.mainfile, self.upload_files, strict=False)

        if parser is None and not config.reprocess_unmatched:
            # Remove the log file and set a fake logwriter to avoid creating a new one,
            # because we will probably remove this calc and don't want ghost log files.
            if self._calc_proc_logwriter_ctx is not None:
                self._calc_proc_logwriter_ctx.__exit__(None, None, None)
                self.upload_files.archive_log_file_object(self.calc_id).delete()

            self._calc_proc_logwriter_ctx = open('/dev/null', 'wt')
            self._calc_proc_logwriter = self._calc_proc_logwriter_ctx.__enter__()
            self.get_logger().error(
                'no parser matches during re-process, will not re-process this calc')

            self.errors = ['no parser matches during re-process, will not re-process this calc']

            # mock the steps of actual processing
            self._continue_with('parsing')
            self._continue_with('normalizing')
            self._continue_with('archiving')
            self._complete()
            return

        logger = self.get_logger()
        if parser is None:
            self.get_logger().error('no parser matches during re-process, use the old parser')
            self.errors = ['no matching parser found during re-processing']
        elif self.parser != parser.name:
            self.parser = parser.name
            logger.info(
                'different parser matches during re-process, use new parser',
                parser=parser.name)

        try:
            calc_with_metadata = datamodel.CalcWithMetadata(**self.metadata)
            calc_with_metadata.upload_id = self.upload_id
            calc_with_metadata.calc_id = self.calc_id
            calc_with_metadata.calc_hash = self.upload_files.calc_hash(self.mainfile)
            calc_with_metadata.mainfile = self.mainfile
            calc_with_metadata.nomad_version = config.version
            calc_with_metadata.nomad_commit = config.commit
            calc_with_metadata.last_processing = datetime.utcnow()
            calc_with_metadata.files = self.upload_files.calc_files(self.mainfile)
            self.metadata = calc_with_metadata.to_dict()

            self.parsing()
            self.normalizing()
            self.archiving()
        finally:
            # close loghandler that was not closed due to failures
            try:
                if self._calc_proc_logwriter is not None:
                    self._calc_proc_logwriter.close()
                    self._calc_proc_logwriter = None
            except Exception as e:
                logger.error('could not close calculation proc log', exc_info=e)

    @process
    def process_calc(self):
        """
        Processes a new calculation that has no prior records in the mongo, elastic,
        or filesystem storage. It will create an initial set of (user) metadata.
        """
        logger = self.get_logger()
        if self.upload is None:
            logger.error('calculation upload does not exist')

        try:
            # save preliminary minimum calc metadata in case processing fails
            # successful processing will replace it with the actual metadata
            calc_with_metadata = datamodel.CalcWithMetadata(
                upload_id=self.upload_id,
                calc_id=self.calc_id,
                calc_hash=self.upload_files.calc_hash(self.mainfile),
                mainfile=self.mainfile)
            calc_with_metadata.published = False
            calc_with_metadata.uploader = self.upload.user_id
            calc_with_metadata.upload_time = self.upload.upload_time
            calc_with_metadata.upload_name = self.upload.name
            calc_with_metadata.nomad_version = config.version
            calc_with_metadata.nomad_commit = config.commit
            calc_with_metadata.last_processing = datetime.utcnow()
            calc_with_metadata.files = self.upload_files.calc_files(self.mainfile)
            self.metadata = calc_with_metadata.to_dict()

            if len(calc_with_metadata.files) >= config.auxfile_cutoff:
                self.warning(
                    'This calc has many aux files in its directory. '
                    'Have you placed many calculations in the same directory?')

            self.parsing()
            self.normalizing()
            self.archiving()
        finally:
            # close loghandler that was not closed due to failures
            try:
                if self._calc_proc_logwriter is not None:
                    self._calc_proc_logwriter.close()
                    self._calc_proc_logwriter = None
            except Exception as e:
                logger.error('could not close calculation proc log', exc_info=e)

    def fail(self, *errors, log_level=logging.ERROR, **kwargs):
        # in case of failure, index a minimum set of metadata and mark
        # processing failure
        try:
            calc_with_metadata = datamodel.CalcWithMetadata(**self.metadata)
            calc_with_metadata.formula = config.services.not_processed_value
            calc_with_metadata.basis_set = config.services.not_processed_value
            calc_with_metadata.xc_functional = config.services.not_processed_value
            calc_with_metadata.system = config.services.not_processed_value
            calc_with_metadata.crystal_system = config.services.not_processed_value
            calc_with_metadata.spacegroup = config.services.not_processed_value
            calc_with_metadata.spacegroup_symbol = config.services.not_processed_value
            calc_with_metadata.code_version = config.services.not_processed_value

            calc_with_metadata.code_name = config.services.not_processed_value
            if self.parser is not None:
                parser = parser_dict[self.parser]
                if hasattr(parser, 'code_name'):
                    calc_with_metadata.code_name = parser.code_name

            calc_with_metadata.processed = False
            self.metadata = calc_with_metadata.to_dict()
            search.Entry.from_calc_with_metadata(calc_with_metadata).save()
        except Exception as e:
            self.get_logger().error('could not index after processing failure', exc_info=e)

        super().fail(*errors, log_level=log_level, **kwargs)

    def on_process_complete(self, process_name):
        # the save might be necessary to correctly read the join condition from the db
        self.save()
        # in case of error, the process_name might be unknown
        if process_name == 'process_calc' or process_name == 're_process_calc' or process_name is None:
            self.upload.reload()
            self.upload.check_join()

    @task
    def parsing(self):
        """ The *task* that encapsulates all parsing related actions. """
        context = dict(parser=self.parser, step=self.parser)
        logger = self.get_logger(**context)
        parser = parser_dict[self.parser]
        self.metadata['parser_name'] = self.parser

        with utils.timer(logger, 'parser executed', input_size=self.mainfile_file.size):
            try:
                self._parser_backend = parser.run(
                    self.upload_files.raw_file_object(self.mainfile).os_path, logger=logger)
            except Exception as e:
                self.fail('parser failed with exception', exc_info=e, error=str(e), **context)
                return
            except SystemExit:
                self.fail('parser raised system exit', error='system exit', **context)
                return

        # add the non code specific calc metadata to the backend
        # all other quantities have been determined by parsers/normalizers
        self._parser_backend.openNonOverlappingSection('section_entry_info')
        self._parser_backend.addValue('upload_id', self.upload_id)
        self._parser_backend.addValue('calc_id', self.calc_id)
        self._parser_backend.addValue('calc_hash', self.metadata['calc_hash'])
        self._parser_backend.addValue('mainfile', self.mainfile)
        self._parser_backend.addValue('parser_name', self.parser)
        filepaths = self.metadata['files']
        self._parser_backend.addValue('number_of_files', len(filepaths))
        self._parser_backend.addValue('filepaths', filepaths)
        uploader = self.upload.uploader
        self._parser_backend.addValue(
            'entry_uploader_name', '%s, %s' % (uploader.first_name, uploader.last_name))
        self._parser_backend.addValue(
            'entry_uploader_id', str(uploader.user_id))
        self._parser_backend.addValue('entry_upload_time', int(self.upload.upload_time.timestamp()))
        self._parser_backend.closeNonOverlappingSection('section_entry_info')

        self.add_processor_info(self.parser)

        if self._parser_backend.status[0] != 'ParseSuccess':
            error = self._parser_backend.status[1]
            self.fail('parser failed', error=error, **context)

    @contextmanager
    def use_parser_backend(self, processor_name):
        self._parser_backend.reset_status()
        yield self._parser_backend
        self.add_processor_info(processor_name)
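
    # Usage sketch for the context manager above (this is how normalizing
    # below uses it):
    #
    #     with self.use_parser_backend(normalizer_name) as backend:
    #         normalizer(backend).normalize(logger=logger)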

    def add_processor_info(self, processor_name: str) -> None:
        self._parser_backend.openContext('/section_entry_info/0')
        self._parser_backend.openNonOverlappingSection('section_archive_processing_info')
        self._parser_backend.addValue('archive_processor_name', processor_name)

        if self._parser_backend.status[0] == 'ParseSuccess':
            warnings = getattr(self._parser_backend, '_warnings', [])
            if len(warnings) > 0:
                self._parser_backend.addValue('archive_processor_status', 'WithWarnings')
                self._parser_backend.addValue('number_of_archive_processor_warnings', len(warnings))
                self._parser_backend.addArrayValues('archive_processor_warnings', [str(warning) for warning in warnings])
            else:
                self._parser_backend.addValue('archive_processor_status', 'Success')
        else:
            errors = self._parser_backend.status[1]
            self._parser_backend.addValue('archive_processor_error', str(errors))

        self._parser_backend.closeNonOverlappingSection('section_archive_processing_info')
        self._parser_backend.closeContext('/section_entry_info/0')

    @task
    def normalizing(self):
        """ The *task* that encapsulates all normalizing related actions. """
        for normalizer in normalizers:
            if normalizer.domain != config.domain:
                continue

            normalizer_name = normalizer.__name__
            context = dict(normalizer=normalizer_name, step=normalizer_name)
            logger = self.get_logger(**context)

            with utils.timer(
                    logger, 'normalizer executed', input_size=self.mainfile_file.size):
                with self.use_parser_backend(normalizer_name) as backend:
                    try:
                        normalizer(backend).normalize(logger=logger)
                    except Exception as e:
                        self._parser_backend.finishedParsingSession('ParseFailure', [str(e)])
                        self.fail(
                            'normalizer failed with exception', exc_info=e, error=str(e), **context)
                        break
                    else:
                        if self._parser_backend.status[0] != 'ParseSuccess':
                            error = self._parser_backend.status[1]
                            self.fail('normalizer failed', error=error, **context)
                            break
                        else:
                            logger.debug(
                                'completed normalizer successfully', normalizer=normalizer_name)

    @task
    def archiving(self):
        """ The *task* that encapsulates all archival related actions. """
        logger = self.get_logger()

        calc_with_metadata = datamodel.CalcWithMetadata(**self.metadata)
        calc_with_metadata.apply_domain_metadata(self._parser_backend)
        calc_with_metadata.processed = True

        # persist the calc metadata
        with utils.timer(logger, 'saved calc metadata', step='metadata'):
            self.metadata = calc_with_metadata.to_dict()

        # index in search
        with utils.timer(logger, 'indexed', step='index'):
            search.Entry.from_calc_with_metadata(calc_with_metadata).save()

        # persist the archive
        with utils.timer(
                logger, 'archived', step='archive',
                input_size=self.mainfile_file.size) as log_data:
            with self.upload_files.archive_file(self.calc_id, 'wt') as out:
                self._parser_backend.write_json(out, pretty=True, root_sections=Domain.instance.root_sections)

            log_data.update(archive_size=self.upload_files.archive_file_object(self.calc_id).size)

        # close loghandler
        if self._calc_proc_logwriter is not None:
            with utils.timer(
                    logger, 'archived log', step='logs',
                    input_size=self.mainfile_file.size) as log_data:
                self._calc_proc_logwriter_ctx.__exit__(None, None, None)  # pylint: disable=E1101
                self._calc_proc_logwriter = None

                log_data.update(log_size=self.upload_files.archive_log_file_object(self.calc_id).size)

    def __str__(self):
        return 'calc %s calc_id=%s upload_id=%s' % (super().__str__(), self.calc_id, self.upload_id)


class Upload(Proc):
    """
    Represents uploads in the databases. Provides persistence access to the files storage,
    and processing state.

    Attributes:
        name: optional user provided upload name
        upload_path: the path where the uploaded file was stored
        temporary: True if the uploaded file should be removed after extraction
        upload_id: the upload id generated by the database
        upload_time: the timestamp when the system realised the upload
        user_id: the id of the user that created this upload
        published: Boolean that indicates the publish status
        publish_time: Date when the upload was initially published
        last_update: Date of the last publishing/re-processing
        joined: Boolean indicates if the running processing has joined (:func:`check_join`)
    """
    id_field = 'upload_id'

    upload_id = StringField(primary_key=True)
    upload_path = StringField(default=None)
    temporary = BooleanField(default=False)
    embargo_length = IntField(default=36)

    name = StringField(default=None)
    upload_time = DateTimeField()
    user_id = StringField(required=True)
    published = BooleanField(default=False)
    publish_time = DateTimeField()
    last_update = DateTimeField()

    joined = BooleanField(default=False)

    meta: Any = {
        'indexes': [
            'user_id', 'tasks_status', 'process_status', 'published', 'upload_time'
        ]
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._upload_files: ArchiveBasedStagingUploadFiles = None

    @property
    def metadata(self) -> dict:
        """
        Getter, setter for user metadata. Metadata is pickled to and from the public
        bucket to allow sharing among all processes. Usually uploads do not have (much)
        user defined metadata, but users provide all metadata per upload as part of
        the publish process. This will change when we introduce editing functionality
        and metadata will be provided through different means.
        """
        try:
            upload_files = PublicUploadFiles(self.upload_id, is_authorized=lambda: True)
        except KeyError:
            return None
        return upload_files.user_metadata

    @metadata.setter
    def metadata(self, data: dict) -> None:
        upload_files = PublicUploadFiles(self.upload_id, is_authorized=lambda: True, create=True)
        upload_files.user_metadata = data
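
    # A hedged usage sketch of the property pair above ('comment' is a
    # hypothetical metadata key):
    #
    #     upload.metadata = {'comment': 'user provided comment'}
    #     assert upload.metadata['comment'] == 'user provided comment'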

    @classmethod
    def get(cls, id: str, include_published: bool = True) -> 'Upload':
        return cls.get_by_id(id, 'upload_id')

    @classmethod
    def user_uploads(cls, user: datamodel.User, **kwargs) -> List['Upload']:
        """ Returns all uploads for the given user. Kwargs are passed to mongo query. """
        return cls.objects(user_id=str(user.user_id), **kwargs)

    @property
    def uploader(self):
        return datamodel.User.get(self.user_id)

    def get_logger(self, **kwargs):
        logger = super().get_logger()
        user = self.uploader
        user_name = '%s %s' % (user.first_name, user.last_name)
        # We are not using 'user_id' because logstash (?) will filter these entries ?!
        logger = logger.bind(
            upload_id=self.upload_id, upload_name=self.name, user_name=user_name,
            user=user.user_id, **kwargs)
        return logger

    @classmethod
    def create(cls, **kwargs) -> 'Upload':
        """
        Creates a new upload for the given user; a user given name is optional.
        It will populate the record with a signed url and pending :class:`UploadProc`.
        The upload will be already saved to the database.

        Arguments:
            user: The user that created the upload.
        """
        # use kwargs to keep compatibility with super method
        user: datamodel.User = kwargs['user']
        del kwargs['user']

        if 'upload_id' not in kwargs:
            kwargs.update(upload_id=utils.create_uuid())
        kwargs.update(user_id=user.user_id)
        self = super().create(**kwargs)

        self._continue_with('uploading')

        return self

    def delete(self):
        """ Deletes this upload process state entry and its calcs. """
        Calc.objects(upload_id=self.upload_id).delete()
        super().delete()

    def delete_upload_local(self):
        """
        Deletes the upload, including its processing state and
        staging files. Local version without celery processing.
        """
        logger = self.get_logger()

        with utils.lnr(logger, 'staged upload delete failed'):
            with utils.timer(
                    logger, 'upload deleted from index', step='index',
                    upload_size=self.upload_files.size):
                search.delete_upload(self.upload_id)

            with utils.timer(
                    logger, 'staged upload deleted', step='files',
                    upload_size=self.upload_files.size):
                self.upload_files.delete()

            self.delete()

    @process
    def delete_upload(self):
        """
        Deletes the upload, including its processing state and
        staging files. This starts the celery process of deleting the upload.
        """
        self.delete_upload_local()

        return True  # do not save the process status on the delete upload

    @process
    def publish_upload(self):
        """
        Moves the upload out of staging to the public area. It will
        pack the staging upload files into public upload files.
        """
        assert self.processed_calcs > 0

        logger = self.get_logger()
        logger.info('started to publish')

        with utils.lnr(logger, 'publish failed'):
            upload_with_metadata = self.to_upload_with_metadata(self.metadata)
            calcs = upload_with_metadata.calcs

            with utils.timer(
                    logger, 'upload metadata updated', step='metadata',
                    upload_size=self.upload_files.size):

                def create_update(calc):
                    calc.published = True
                    calc.with_embargo = calc.with_embargo if calc.with_embargo is not None else False
                    return UpdateOne(
                        {'_id': calc.calc_id},
                        {'$set': {'metadata': calc.to_dict()}})

                Calc._get_collection().bulk_write([create_update(calc) for calc in calcs])

            if isinstance(self.upload_files, StagingUploadFiles):
                with utils.timer(
                        logger, 'staged upload files packed', step='pack',
                        upload_size=self.upload_files.size):
                    self.upload_files.pack(upload_with_metadata)

            with utils.timer(
                    logger, 'index updated', step='index',
                    upload_size=self.upload_files.size):
                search.publish(calcs)

            if isinstance(self.upload_files, StagingUploadFiles):
                with utils.timer(
                        logger, 'staged upload deleted', step='delete staged',
                        upload_size=self.upload_files.size):
                    self.upload_files.delete()
                    self.published = True
                    self.publish_time = datetime.utcnow()
                    self.last_update = datetime.utcnow()
                    self.save()
            else:
                self.last_update = datetime.utcnow()
                self.save()

    @process
    def re_process_upload(self):
        """
        A *process* that performs the re-processing of an earlier processed
        upload.

        Runs the distributed process of fully reparsing/renormalizing an existing and
        already published upload. Will renew the archive part of the upload and update
        mongo and elastic search entries.

        TODO this implementation does not do any re-matching. This will be more complex
        due to handling of new or missing matches.
        """
        assert self.published

        logger = self.get_logger()
        logger.info('started to re-process')

        # mock the steps of actual processing
        self._continue_with('uploading')

        # extract the published raw files into a staging upload files instance
        self._continue_with('extracting')
        public_upload_files = cast(PublicUploadFiles, self.upload_files)
        staging_upload_files = public_upload_files.to_staging_upload_files(create=True)

        self._continue_with('parse_all')
        try:
            # check if a calc is already/still processing
            processing = Calc.objects(
                upload_id=self.upload_id,
                **Calc.process_running_mongoengine_query()).count()

            if processing > 0:
                logger.warn(
                    'processes are still/already running on calc, they will be reset',
                    count=processing)

            # reset all calcs
            Calc._get_collection().update_many(
                dict(upload_id=self.upload_id),
                {'$set': Calc.reset_pymongo_update(worker_hostname=self.worker_hostname)})

            # process all calcs
            Calc.process_all(Calc.re_process_calc, dict(upload_id=self.upload_id), exclude=['metadata'])

            logger.info('completed to trigger re-process of all calcs')
        except Exception as e:
            # try to remove the staging copy in failure case
            logger.error('failed to trigger re-process of all calcs', exc_info=e)

            if staging_upload_files is not None and staging_upload_files.exists():
                staging_upload_files.delete()

            raise e

        # the packing and removing of the staging upload files will be triggered by
        # the 'cleanup' task after processing all calcs

    @process
    def re_pack(self):
        """ A *process* that repacks the raw and archive data based on the current embargo data. """
        assert self.published

        # mock the steps of actual processing
        self._continue_with('uploading')
        self._continue_with('extracting')
        self._continue_with('parse_all')
        self._continue_with('cleanup')

        self.upload_files.re_pack(self.to_upload_with_metadata())
        self.joined = True
        self._complete()

    @process
    def process_upload(self):
        """ A *process* that performs the initial upload processing. """
        self.extracting()
        self.parse_all()

    @task
    def uploading(self):
        """ A no-op *task* as a stand-in for receiving upload data. """
        pass

    @property
    def upload_files(self) -> UploadFiles:
        upload_files_class = ArchiveBasedStagingUploadFiles if not self.published else PublicUploadFiles
        kwargs = dict(upload_path=self.upload_path) if not self.published else {}

        if not self._upload_files or not isinstance(self._upload_files, upload_files_class):
            self._upload_files = upload_files_class(
                self.upload_id, is_authorized=lambda: True, **kwargs)

        return self._upload_files

    @property
    def staging_upload_files(self) -> ArchiveBasedStagingUploadFiles:
        assert not self.published
        return cast(ArchiveBasedStagingUploadFiles, self.upload_files)

    @task
    def extracting(self):
        """
        The *task* performed before the actual parsing/normalizing: extracting
        the uploaded files.
        """
        # extract the uploaded file
        self._upload_files = ArchiveBasedStagingUploadFiles(
            upload_id=self.upload_id, is_authorized=lambda: True, create=True,
            upload_path=self.upload_path)

        logger = self.get_logger()
        try:
            with utils.timer(
                    logger, 'upload extracted', step='extracting',
                    upload_size=self.upload_files.size):
                self.upload_files.extract()

            if self.temporary:
                os.remove(self.upload_path)
                self.upload_path = None

        except KeyError:
            self.fail('processing requested for non existing upload', log_level=logging.ERROR)
            return
        except ExtractError:
            self.fail('bad .zip/.tar file', log_level=logging.INFO)
            return

    def _preprocess_files(self, path):
        """
        Some files need preprocessing. Currently we need to add a stripped POTCAR version
        and always restrict/embargo the original.
        """
        if os.path.basename(path).startswith('POTCAR'):
            # create checksum
            hash = hashlib.sha224()
            with open(self.staging_upload_files.raw_file_object(path).os_path, 'rb') as orig_f:
                for line in orig_f.readlines():
                    hash.update(line)

            checksum = hash.hexdigest()

            # created stripped POTCAR
            stripped_path = path + '.stripped'
            with open(self.staging_upload_files.raw_file_object(stripped_path).os_path, 'wt') as stripped_f:
                stripped_f.write('Stripped POTCAR file. Checksum of original file (sha224): %s\n' % checksum)
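            # The awk program below copies the POTCAR header (everything up to
            # and including 'END of PSCTR') and the 'End of Dataset' marker
            # lines, skipping the actual potential data in between; the
            # original file remains restricted/embargoed.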
            os.system(
                '''
                    awk < %s >> %s '
                    BEGIN { dump=1 }
                    /End of Dataset/ { dump=1 }
                    dump==1 { print }
                    /END of PSCTR/ { dump=0 }'
                ''' % (
                    self.staging_upload_files.raw_file_object(path).os_path,
                    self.staging_upload_files.raw_file_object(stripped_path).os_path))

    def match_mainfiles(self) -> Generator[Tuple[str, object], None, None]:
        """
        Generator function that matches all files in the upload to all parsers to
        determine the upload's mainfiles.

        Returns:
            Tuples of mainfile (filename) and the matched parser
        """
        directories_with_match: Dict[str, str] = dict()
        upload_files = self.staging_upload_files
        for filename in upload_files.raw_file_manifest():
            self._preprocess_files(filename)
            try:
                parser = match_parser(filename, upload_files)
                if parser is not None:
                    directory = os.path.dirname(filename)
                    if directory in directories_with_match:
                        # TODO this might give us the chance to store directory based relationship
                        # between calcs for the future?
                        pass
                    else:
                        directories_with_match[directory] = filename

                    yield filename, parser
            except Exception as e:
                self.get_logger().error(
                    'exception while matching pot. mainfile',
                    mainfile=filename, exc_info=e)

    @task
    def parse_all(self):
        """
        The *task* that identifies mainfile/parser combinations among the upload's files,
        creates respective :class:`Calc` instances, and triggers their processing.
        """
        logger = self.get_logger()

        with utils.timer(
                logger, 'upload extracted', step='matching',
                upload_size=self.upload_files.size):
            for filename, parser in self.match_mainfiles():
                calc = Calc.create(
                    calc_id=self.upload_files.calc_id(filename),
                    mainfile=filename, parser=parser.name,
                    worker_hostname=self.worker_hostname,
                    upload_id=self.upload_id)

                calc.process_calc()

    def on_process_complete(self, process_name):
        if process_name == 'process_upload' or process_name == 're_process_upload':
            self.check_join()

    def check_join(self):
        """
        Performs an evaluation of the join condition and triggers the :func:`cleanup`
        task if necessary. The join condition allows to run the ``cleanup`` after
        all calculations have been processed. The upload processing stops after all
        calculation processings have been triggered (:func:`parse_all` or
        :func:`re_process_upload`). The cleanup task is then run within the last
        calculation process (the one that triggered the join by calling this method).
        """
        total_calcs = self.total_calcs
        processed_calcs = self.processed_calcs

        self.get_logger().debug('check join', processed_calcs=processed_calcs, total_calcs=total_calcs)
        # check if process is not running anymore, i.e. not still spawning new processes to join
        # check the join condition, i.e. all calcs have been processed
        if not self.process_running and processed_calcs >= total_calcs:
            # this can easily be called multiple times, e.g. upload finished after all calcs finished
            modified_upload = self._get_collection().find_one_and_update(
                {'_id': self.upload_id, 'joined': {'$ne': True}},
                {'$set': {'joined': True}})
            if modified_upload is not None:
                self.get_logger().debug('join')
                self.cleanup()
            else:
                # the join was already done due to a prior call
                pass

    def reset(self):
        self.joined = False
        super().reset()

    @classmethod
    def reset_pymongo_update(cls, worker_hostname: str = None):
        update = super().reset_pymongo_update()
        update.update(joined=False)
        return update
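
    # Note: clearing 'joined' in bulk resets matters; check_join's
    # find_one_and_update only triggers cleanup when it can flip 'joined'
    # from False to True.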

    def _cleanup_after_processing(self):
        # send email about process finish
        user = self.uploader
        name = '%s %s' % (user.first_name, user.last_name)
        message = '\n'.join([
            'Dear %s,' % name,
            '',
            'your data %suploaded at %s has completed processing.' % (
                '"%s" ' % self.name if self.name else '', self.upload_time.isoformat()),  # pylint: disable=no-member
            'You can review your data on your upload page: %s' % config.gui_url(),
            '',
            'If you encounter any issues with your upload, please let us know and reply to this email.',
            '',
            'The nomad team'
        ])
        try:
            infrastructure.send_mail(
                name=name, email=user.email, message=message, subject='Processing completed')
        except Exception as e:
            # probably due to email configuration problems
            # don't fail or present this error to clients
            self.logger.error('could not send after processing email', exc_info=e)

    def _cleanup_after_re_processing(self):
        logger = self.get_logger()
        logger.info('started to repack re-processed upload')

        staging_upload_files = self.upload_files.to_staging_upload_files()

        with utils.timer(
                logger, 'reprocessed staged upload packed', step='pack',
                upload_size=self.upload_files.size):

            staging_upload_files.pack(self.to_upload_with_metadata(), skip_raw=True)

        with utils.timer(
                logger, 'reprocessed staged upload deleted', step='delete staged',
                upload_size=self.upload_files.size):

            staging_upload_files.delete()
            self.last_update = datetime.utcnow()
            self.save()

    @task
    def cleanup(self):
        """
        The *task* that "cleans" the processing, i.e. removed obsolete files and performs
        pending archival operations. Depends on the type of processing.
        """
        search.refresh()

        if self.current_process == 're_process_upload':
            self._cleanup_after_re_processing()
        else:
            self._cleanup_after_processing()

    def get_calc(self, calc_id) -> Calc: