# Copyright 2018 Markus Scheidgen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
This module comprises a set of persistent document classes that hold all user related
data: information about users, their uploads and datasets, the associated
calculations, and files.


.. autoclass:: Calc
    :members:
.. autoclass:: Upload
    :members:
"""

from typing import List, Any, ContextManager, Tuple, Generator
from contextlib import contextmanager
import logging

from mongoengine import StringField, BooleanField, DateTimeField, DictField, IntField
from structlog import wrap_logger

from nomad import utils, coe_repo, datamodel
from nomad.files import PathObject, ArchiveBasedStagingUploadFiles
from nomad.repo import RepoCalc, RepoUpload
from nomad.processing.base import Proc, Chord, process, task, PENDING, SUCCESS, FAILURE
from nomad.parsing import parsers, parser_dict
from nomad.normalizing import normalizers


class NotAllowedDuringProcessing(Exception): pass


class Calc(Proc, datamodel.Calc):
    """
    Instances of this class represent calculations. This class manages the elastic
    search index entry, files, and archive for the respective calculation.

    It also contains the calculations processing and its state.

    The attribute list, does not include the various repository properties generated
    while parsing, including ``program_name``, ``program_version``, etc.

    Attributes:
55
        archive_id: the full id upload_id and calc_id based id
Markus Scheidgen's avatar
Markus Scheidgen committed
56
57
58
59
60
61
62
63
64
65
66
        parser: the name of the parser used to process this calc
        upload_id: the id of the upload used to create this calculation
        mainfile: the mainfile (including path in upload) that was used to create this calc
    """
    archive_id = StringField(primary_key=True)
    upload_id = StringField()
    mainfile = StringField()
    parser = StringField()

    meta: Any = {
        'indexes': [
            'upload_id', 'mainfile', 'code', 'parser', 'status'
        ]
    }

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._parser_backend = None
        self._upload: Upload = None
        self._upload_files: ArchiveBasedStagingUploadFiles = None
        self._calc_proc_logwriter = None
        self._calc_proc_logwriter_ctx: ContextManager = None

    @classmethod
    def get(cls, id):
        return cls.get_by_id(id, 'archive_id')

    @property
    def mainfile_file(self) -> PathObject:
        return self.upload_files.raw_file_object(self.mainfile)

    @property
    def calc_id(self) -> str:
        return utils.archive.calc_id(self.archive_id)

    @property
    def upload(self) -> 'Upload':
        if not self._upload:
            self._upload = Upload.get(self.upload_id)
        return self._upload

    @property
    def upload_files(self) -> ArchiveBasedStagingUploadFiles:
        if not self._upload_files:
            self._upload_files = ArchiveBasedStagingUploadFiles(self.upload_id, is_authorized=lambda: True, local_path=self.upload.local_path)
        return self._upload_files

    def get_logger(self, **kwargs):
        logger = super().get_logger()
        logger = logger.bind(
            upload_id=self.upload_id, mainfile=self.mainfile, calc_id=self.calc_id,
            archive_id=self.archive_id, **kwargs)

        return logger

    def get_calc_logger(self, **kwargs):
        """
        Returns a wrapped logger that additionally saves all entries to the calculation
        processing log in the archive.
        """
        logger = self.get_logger(**kwargs)

        if self._calc_proc_logwriter is None:
            self._calc_proc_logwriter_ctx = self.upload_files.archive_log_file(self.calc_id, 'wt')
            self._calc_proc_logwriter = self._calc_proc_logwriter_ctx.__enter__()  # pylint: disable=E1101

        def save_to_calc_log(logger, method_name, event_dict):
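            # build a single, short log line from the structlog event dict; entries longer
            # than 120 characters are truncated below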
            program = event_dict.get('normalizer', 'parser')
            event = event_dict.get('event', '')
            entry = '[%s] %s: %s' % (method_name, program, event)
            if len(entry) > 120:
                self._calc_proc_logwriter.write(entry[:120])
                self._calc_proc_logwriter.write('...')
            else:
                self._calc_proc_logwriter.write(entry)
            self._calc_proc_logwriter.write('\n')
            return event_dict

        return wrap_logger(logger, processors=[save_to_calc_log])

    @process
    def process(self):
        logger = self.get_logger()
        if self.upload is None:
            logger.error('calculation upload does not exist')

        try:
            self.parsing()
            self.normalizing()
            self.archiving()
        finally:
            # close loghandler that was not closed due to failures
            try:
                if self._calc_proc_logwriter is not None:
                    self._calc_proc_logwriter.close()
                    self._calc_proc_logwriter = None
            except Exception as e:
                logger.error('could not close calculation proc log', exc_info=e)

            # inform parent proc about completion
            self.upload.completed_child()

    @task
    def parsing(self):
        context = dict(parser=self.parser, step=self.parser)
        logger = self.get_calc_logger(**context)
        parser = parser_dict[self.parser]

        with utils.timer(logger, 'parser executed', input_size=self.mainfile_file.size):
            self._parser_backend = parser.run(
                self.upload_files.raw_file_object(self.mainfile).os_path, logger=logger)

        self._parser_backend.openNonOverlappingSection('section_calculation_info')
        self._parser_backend.addValue('upload_id', self.upload_id)
        self._parser_backend.addValue('archive_id', self.archive_id)
        self._parser_backend.addValue('main_file', self.mainfile)
        self._parser_backend.addValue('parser_name', self.parser)

        if self._parser_backend.status[0] != 'ParseSuccess':
            logger.error(self._parser_backend.status[1])
            error = self._parser_backend.status[1]
            self._parser_backend.addValue('parse_status', 'ParseFailure')
            self.fail(error, level=logging.DEBUG, **context)
        else:
            self._parser_backend.addValue('parse_status', 'ParseSuccess')

        self._parser_backend.closeNonOverlappingSection('section_calculation_info')

        self.add_processor_info(self.parser)

    @contextmanager
    def use_parser_backend(self, processor_name):
        self._parser_backend.reset_status()
        yield self._parser_backend
        self.add_processor_info(processor_name)
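
    # Illustrative sketch of how the context manager above is used by the normalizing
    # task below (the normalizer name here is hypothetical):
    #
    #     with self.use_parser_backend('SystemNormalizer') as backend:
    #         backend.addValue('some_quantity', 'some_value')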

    def add_processor_info(self, processor_name: str) -> None:
        self._parser_backend.openContext('/section_calculation_info/0')
        self._parser_backend.openNonOverlappingSection('section_archive_processing_info')
        self._parser_backend.addValue('archive_processor_name', processor_name)

        if self._parser_backend.status[0] == 'ParseSuccess':
            warnings = getattr(self._parser_backend, '_warnings', [])
            if len(warnings) > 0:
                self._parser_backend.addValue('archive_processor_status', 'WithWarnings')
                self._parser_backend.addValue('archive_processor_warning_number', len(warnings))
                self._parser_backend.addArrayValues('archive_processor_warnings', [str(warning) for warning in warnings])
            else:
                self._parser_backend.addValue('archive_processor_status', 'Success')
        else:
            errors = self._parser_backend.status[1]
            self._parser_backend.addValue('archive_processor_error', str(errors))

        self._parser_backend.closeNonOverlappingSection('section_archive_processing_info')
        self._parser_backend.closeContext('/section_calculation_info/0')

    @task
    def normalizing(self):
        for normalizer in normalizers:
            normalizer_name = normalizer.__name__
            context = dict(normalizer=normalizer_name, step=normalizer_name)
            logger = self.get_calc_logger(**context)

            with utils.timer(
                    logger, 'normalizer executed', input_size=self.mainfile_file.size):
                with self.use_parser_backend(normalizer_name) as backend:
                    normalizer(backend).normalize(logger=logger)

            failed = self._parser_backend.status[0] != 'ParseSuccess'
            if failed:
                logger.error(self._parser_backend.status[1])
                error = self._parser_backend.status[1]
                self.fail(error, level=logging.WARNING, **context)
                break
            else:
                logger.debug(
                    'completed normalizer successfully', normalizer=normalizer_name)

    @task
    def archiving(self):
        logger = self.get_logger()

        _, calc_id = self.archive_id.split('/')
        additional = dict(
            mainfile=self.mainfile,
            upload_time=self.upload.upload_time,
            staging=True,
            restricted=False,
            user_id=self.upload.user_id,
            aux_files=list(self.upload_files.calc_files(self.mainfile, with_mainfile=False)))

        with utils.timer(logger, 'indexed', step='index'):
            # persist to elastic search
            repo_calc = RepoCalc.create_from_backend(
                self._parser_backend,
                additional=additional,
                calc_id=calc_id,
                upload_id=self.upload_id)
            repo_calc.persist()

        with utils.timer(
                logger, 'archived', step='archive',
                input_size=self.mainfile_file.size) as log_data:

            # persist the archive
            with self.upload_files.archive_file(self.calc_id, 'wt') as out:
                self._parser_backend.write_json(out, pretty=True)

            log_data.update(archive_size=self.upload_files.archive_file_object(self.calc_id).size)

        # close loghandler
        if self._calc_proc_logwriter is not None:
            with utils.timer(
                    logger, 'archived log', step='archive_log',
                    input_size=self.mainfile_file.size) as log_data:
                self._calc_proc_logwriter_ctx.__exit__(None, None, None)  # pylint: disable=E1101
                self._calc_proc_logwriter = None

                log_data.update(log_size=self.upload_files.archive_log_file_object(self.calc_id).size)


class Upload(Chord, datamodel.Upload):
    """
    Represents uploads in the database. Provides persistent access to the files storage
    and processing state.

    Attributes:
        name: optional user provided upload name
        local_path: optional local path, e.g. for files that are already somewhere on the server
        additional_metadata: optional user provided additional meta data
        upload_id: the upload id generated by the database
        is_private: true if the upload and its derivatives are only visible to the uploader
        upload_time: the timestamp when the system realised the upload
        user_id: the id of the user that created this upload
    """
    id_field = 'upload_id'

    upload_id = StringField(primary_key=True)

    name = StringField(default=None)
    local_path = StringField(default=None)

    additional_metadata = DictField(default=None)

    is_private = BooleanField(default=False)

    upload_time = DateTimeField()

    user_id = StringField(required=True)

    coe_repo_upload_id = IntField(default=None)

    _initiated_parsers = IntField(default=-1)

    meta: Any = {
        'indexes': [
            'user_id', 'status'
        ]
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._upload_files: ArchiveBasedStagingUploadFiles = None

    @classmethod
    def get(cls, id):
        return cls.get_by_id(id, 'upload_id')

    @classmethod
    def user_uploads(cls, user: coe_repo.User) -> List['Upload']:
        """ Returns all uploads for the given user. """
        return cls.objects(user_id=str(user.user_id))

    @property
    def uploader(self):
        return coe_repo.User.from_user_id(self.user_id)

    def get_logger(self, **kwargs):
        logger = super().get_logger()
        logger = logger.bind(upload_id=self.upload_id, **kwargs)
        return logger

    def delete(self):
        if not (self.completed or self.current_task == 'uploading'):
            raise NotAllowedDuringProcessing()

        Calc.objects(upload_id=self.upload_id).delete()
        super().delete()

    @classmethod
    def create(cls, **kwargs) -> 'Upload':
        """
        Creates a new upload for the given user; a user given name is optional.
        It will populate the record with a signed url and pending :class:`UploadProc`.
        The upload will already be saved to the database.

        Arguments:
            user (coe_repo.User): The user that created the upload.
        """
        user: coe_repo.User = kwargs['user']
        del kwargs['user']
        if 'upload_id' not in kwargs:
            kwargs.update(upload_id=utils.create_uuid())
        kwargs.update(user_id=str(user.user_id))
        self = super().create(**kwargs)

        self._continue_with('uploading')

        return self

    def unstage(self, meta_data):
        self.get_logger().info('unstage')

        if not (self.completed or self.current_task == 'uploading'):
            raise NotAllowedDuringProcessing()

        self.delete()

        self.to(RepoUpload).unstage()
        coe_repo.Upload.add(self, meta_data)
        self.save()

        self.upload_files.pack()
        self.upload_files.delete()

    @process
    def process(self):
        self.extracting()
        self.parse_all()

    @task
    def uploading(self):
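        # intentionally empty placeholder: the actual file upload happens outside of this
        # process, see the ``self._continue_with('uploading')`` call in :func:`create`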
        pass

    @property
    def upload_files(self) -> ArchiveBasedStagingUploadFiles:
        if not self._upload_files:
            self._upload_files = ArchiveBasedStagingUploadFiles(self.upload_id, is_authorized=lambda: True, local_path=self.local_path)
        return self._upload_files

    @task
    def extracting(self):
        """
        Task performed before the actual parsing/normalizing. Extracting and bagging
        the uploaded files, computing all keys, creating an *upload* entry in the NOMAD-coe
        repository db, etc.
        """
        # extract the uploaded file, this will also create a bagit bag.
        logger = self.get_logger()
        try:
            with utils.timer(
                    logger, 'upload extracted', step='extracting',
                    upload_size=self.upload_files.size):
                self.upload_files.extract()
        except KeyError:
            self.fail('process request for non existing upload', level=logging.ERROR)
            return

        # check if the file was already uploaded and processed before
        if self.to(RepoUpload).exists():
            self.fail('The same file was already uploaded and processed.', level=logging.INFO)
            return

    def match_mainfiles(self) -> Generator[Tuple[str, object], None, None]:
        """
        Generator function that matches all files in the upload to all parsers to
        determine the upload's mainfiles.

        Returns:
            Tuples of mainfile (filename) and matching parser
        """
        for filename in self.upload_files.raw_file_manifest():
            for parser in parsers:
                try:
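                    # the lambda passed to ``is_mainfile`` ignores its argument and always
                    # returns the already opened candidate file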
                    with self.upload_files.raw_file(filename) as mainfile_f:
                        if parser.is_mainfile(filename, lambda fn: mainfile_f):
                            yield filename, parser
                except Exception as e:
                    self.get_logger().error(
                        'exception while matching pot. mainfile',
                        mainfile=filename, exc_info=e)

    @task
    def parse_all(self):
        """
        Identifies mainfile/parser combinations among the upload's files, creates
        respective :class:`Calc` instances, and triggers their processing.
        """
        logger = self.get_logger()

        # TODO: deal with multiple possible parser specs
        with utils.timer(
                logger, 'upload extracted', step='matching',
                upload_size=self.upload_files.size):
            total_calcs = 0
            for filename, parser in self.match_mainfiles():
                calc = Calc.create(
                    archive_id='%s/%s' % (self.upload_id, self.upload_files.calc_id(filename)),
                    mainfile=filename, parser=parser.name,
                    upload_id=self.upload_id)

                calc.process()
                total_calcs += 1

        # have to save the total_calcs information for chord management
        self.spwaned_childred(total_calcs)

    def join(self):
        self.cleanup()

    @task
    def cleanup(self):
        # TODO issue #83
        with utils.timer(
                self.get_logger(), 'pack staging upload', step='cleaning',
                upload_size=self.upload_files.size):
            pass

    @property
    def processed_calcs(self):
        return Calc.objects(upload_id=self.upload_id, status__in=[SUCCESS, FAILURE]).count()

    @property
    def total_calcs(self):
        return Calc.objects(upload_id=self.upload_id).count()

    @property
    def failed_calcs(self):
        return Calc.objects(upload_id=self.upload_id, status=FAILURE).count()

    @property
    def pending_calcs(self):
        return Calc.objects(upload_id=self.upload_id, status=PENDING).count()

    def all_calcs(self, start, end, order_by='mainfile'):
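        """ Returns the ``start``/``end`` slice of this upload's calcs, ordered by the given field. """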
        return Calc.objects(upload_id=self.upload_id)[start:end].order_by(order_by)

    @property
    def calcs(self):
        return Calc.objects(upload_id=self.upload_id)