# Copyright 2018 Markus Scheidgen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
The repository API of the nomad@FAIRDI APIs. Currently it allows resolving repository
metadata.
"""

from typing import List, Dict, Any
from flask_restplus import Resource, abort, fields
from flask import request, g
from elasticsearch.exceptions import NotFoundError
import elasticsearch.helpers

from nomad import search, utils, datamodel, processing as proc, infrastructure
from nomad.app.utils import rfc3339DateTime, RFC3339DateTime, with_logger
from nomad.app.optimade import filterparser
from nomad.datamodel import UserMetadata, Dataset, User

from .api import api
from .auth import authenticate
from .common import pagination_model, pagination_request_parser, calc_route

ns = api.namespace('repo', description='Access repository metadata.')


@calc_route(ns)
class RepoCalcResource(Resource):
    @api.response(404, 'The upload or calculation does not exist')
    @api.response(401, 'Not authorized to access the calculation')
    @api.response(200, 'Metadata sent', fields.Raw)
    @api.doc('get_repo_calc')
    @authenticate()
    def get(self, upload_id, calc_id):
        """
        Get calculation metadata in repository form.

        Repository metadata only entails the quantities shown in the repository.
        Calcs are referenced via *upload_id*, *calc_id* pairs.
        """
        try:
            calc = search.Entry.get(calc_id)
        except NotFoundError:
            abort(404, message='There is no calculation %s/%s' % (upload_id, calc_id))

        if calc.with_embargo or not calc.published:
            if g.user is None:
                abort(401, message='Not logged in to access %s/%s.' % (upload_id, calc_id))

            if not (any(g.user.user_id == user.user_id for user in calc.owners) or g.user.is_admin):
                abort(401, message='Not authorized to access %s/%s.' % (upload_id, calc_id))

        return calc.to_dict(), 200


repo_calcs_model = api.model('RepoCalculations', {
    'pagination': fields.Nested(pagination_model, skip_none=True),
    'scroll': fields.Nested(allow_null=True, skip_none=True, model=api.model('Scroll', {
        'total': fields.Integer(description='The total amount of hits for the search.'),
        'scroll_id': fields.String(allow_null=True, description='The scroll_id that can be used to retrieve the next page.'),
        'size': fields.Integer(description='The size of the returned scroll page.')})),
    'results': fields.List(fields.Raw, description=(
        'A list of search results. Each result is a dict with quantity names as keys and '
        'values as values')),
    'statistics': fields.Raw(description=(
        'A dict with all statistics. Each statistic is a dictionary with quantity values '
        'as keys and metrics dicts as values. The possible metrics are code runs (calcs), %s. '
        'There is a pseudo quantity "total" with a single value "all" that contains the '
        'metrics over all results.' % ', '.join(datamodel.Domain.instance.metrics_names))),
    'datasets': fields.Nested(api.model('RepoDatasets', {
        'after': fields.String(description='The after value that can be used to retrieve the next datasets.'),
        'values': fields.Raw(description='A dict with names as key. The values are dicts with "total" and "examples" keys.')
    }), skip_none=True)
})


repo_calc_id_model = api.model('RepoCalculationId', {
    'upload_id': fields.String(), 'calc_id': fields.String()
})


def add_common_parameters(request_parser):
    request_parser.add_argument(
        'owner', type=str,
        help='Specify which calcs to return: ``all``, ``public``, ``user``, ``staging``, default is ``all``')
    request_parser.add_argument(
        'from_time', type=lambda x: rfc3339DateTime.parse(x),
        help='A yyyy-MM-ddTHH:mm:ss (RFC3339) minimum entry time (e.g. upload time)')
    request_parser.add_argument(
        'until_time', type=lambda x: rfc3339DateTime.parse(x),
        help='A yyyy-MM-ddTHH:mm:ss (RFC3339) maximum entry time (e.g. upload time)')

    for quantity in search.quantities.values():
        request_parser.add_argument(
            quantity.name, help=quantity.description,
            action=quantity.argparse_action if quantity.multi else None)


repo_request_parser = pagination_request_parser.copy()
add_common_parameters(repo_request_parser)
repo_request_parser.add_argument(
    'scroll', type=bool, help='Enable scrolling')
repo_request_parser.add_argument(
    'scroll_id', type=str, help='The id of the current scrolling window to use.')
repo_request_parser.add_argument(
    'date_histogram', type=bool, help='Add an additional aggregation over the upload time')
repo_request_parser.add_argument(
    'datasets_after', type=str, help='The last dataset id of the last scroll window for the dataset quantity')
repo_request_parser.add_argument(
    'metrics', type=str, action='append', help=(
        'Metrics to aggregate over all quantities and their values as comma separated list. '
        'Possible values are %s.' % ', '.join(datamodel.Domain.instance.metrics_names)))
repo_request_parser.add_argument(
    'datasets', type=bool, help=('Return dataset information.'))
repo_request_parser.add_argument(
    'statistics', type=bool, help=('Return statistics.'))
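
# Example (illustrative sketch, not part of the original module; assumes the API is
# mounted under /api on a local server and that anonymous access is allowed):
#
#   import requests
#   response = requests.get(
#       'http://localhost/api/repo/',
#       params=dict(owner='public', per_page=25, statistics=True, metrics='datasets'))
#   data = response.json()  # contains 'results', 'pagination' and, here, 'statistics'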


search_request_parser = api.parser()
add_common_parameters(search_request_parser)


def add_query(search_request: search.SearchRequest, args: Dict[str, Any]):
    """
    Helper that adds query-relevant request args to the given SearchRequest.
    """
    args = {key: value for key, value in args.items() if value is not None}

    # owner
    owner = args.get('owner', 'all')
    try:
        search_request.owner(
            owner,
            g.user.user_id if g.user is not None else None)
    except ValueError as e:
        abort(401, getattr(e, 'message', 'Invalid owner parameter: %s' % owner))
    except Exception as e:
        abort(400, getattr(e, 'message', 'Invalid owner parameter'))

    # time range
    from_time_str = args.get('from_time', None)
    until_time_str = args.get('until_time', None)

    try:
        from_time = rfc3339DateTime.parse(from_time_str) if from_time_str is not None else None
        until_time = rfc3339DateTime.parse(until_time_str) if until_time_str is not None else None
        search_request.time_range(start=from_time, end=until_time)
    except Exception:
        abort(400, message='bad datetime format')

    # optimade
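    # If an 'optimade' filter string is given, it is translated into an elasticsearch
    # query via the OPTiMaDe filterparser.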
    try:
        optimade = args.get('optimade', None)
        if optimade is not None:
            q = filterparser.parse_filter(optimade)
            search_request.query(q)
    except filterparser.FilterException:
        abort(400, message='could not parse optimade query')

    # search parameter
    search_request.search_parameters(**{
        key: value for key, value in args.items()
        if key not in ['optimade'] and key in search.quantities})


@ns.route('/')
class RepoCalcsResource(Resource):
    @api.doc('search')
    @api.response(400, 'Invalid requests, e.g. wrong owner type or bad search parameters')
    @api.expect(repo_request_parser, validate=True)
    @api.marshal_with(repo_calcs_model, skip_none=True, code=200, description='Search results sent')
    @authenticate()
    def get(self):
        """
        Search for calculations in the repository form, paginated.

        The ``owner`` parameter determines the overall set of entries to search through.
        Possible values are: ``all`` (show all entries visible to the current user), ``public``
        (show all publicly visible entries), ``user`` (show all user entries, requires login),
        ``staging`` (show all user entries in staging area, requires login).

        You can use the various quantities to search and filter with. For some of the
        indexed quantities this endpoint returns aggregation information. This means
        you will be given a list of all possible values and the number of entries
        that have a certain value. You can also use these aggregations on an empty
        search to determine the possible values.

        The pagination parameters allow determining which page to return via the
        ``page`` and ``per_page`` parameters. Pagination, however, is limited to the first
        100k (depending on ES configuration) hits.

        An alternative to pagination is to use ``scroll`` and ``scroll_id``. With ``scroll``
        you will get a ``scroll_id`` on the first request. Each call with ``scroll`` and
        the respective ``scroll_id`` will return the next ``per_page`` (here the default is 1000)
        results. Scroll, however, ignores ordering and does not return aggregations.
        The scroll view used in the background will stay alive for 1 minute between requests.
        If the given ``scroll_id`` is no longer available, an HTTP 400 is raised.

        The search will return aggregations on a predefined set of quantities. Aggregations
        will tell you what quantity values exist and how many entries match those values.

        Ordering is determined by ``order_by`` and ``order`` parameters.
        """

        try:
            args = {
                key: value for key, value in repo_request_parser.parse_args().items()
                if value is not None}

            scroll = args.get('scroll', False)
            scroll_id = args.get('scroll_id', None)
            page = args.get('page', 1)
            per_page = args.get('per_page', 10 if not scroll else 1000)
            order = args.get('order', -1)
            order_by = args.get('order_by', 'formula')

            date_histogram = args.get('date_histogram', False)
            metrics: List[str] = request.args.getlist('metrics')

            with_datasets = args.get('datasets', False)
            with_statistics = args.get('statistics', False)
        except Exception as e:
            abort(400, message='bad parameters: %s' % str(e))

        search_request = search.SearchRequest()
        add_query(search_request, args)
        if date_histogram:
            search_request.date_histogram()

        try:
            assert page >= 1
            assert per_page >= 0
        except AssertionError:
            abort(400, message='invalid pagination')

        if order not in [-1, 1]:
            abort(400, message='invalid pagination')

        for metric in metrics:
            if metric not in search.metrics_names:
                abort(400, message='there is no metric %s' % metric)

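        # Statistics are only computed on demand. The 'datasets' metric is always added
        # to the totals so that the overall dataset count is available even if the
        # client did not request it as a metric.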
        if with_statistics:
            search_request.default_statistics(metrics_to_use=metrics)
            if 'datasets' not in metrics:
                total_metrics = metrics + ['datasets']
            else:
                total_metrics = metrics
            search_request.totals(metrics_to_use=total_metrics)
            search_request.statistic('authors', 1000)

        try:
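            # Scrolling and pagination are mutually exclusive: a scroll request iterates
            # the full result set window by window, a paginated request returns one page.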
            if scroll:
                results = search_request.execute_scrolled(scroll_id=scroll_id, size=per_page)

            else:
                if with_datasets:
                    search_request.quantity(
                        'dataset_id', size=per_page, examples=1,
                        after=request.args.get('datasets_after', None))

                results = search_request.execute_paginated(
                    per_page=per_page, page=page, order=order, order_by=order_by)

                # TODO just a work around to make things prettier
                if with_statistics:
                    statistics = results['statistics']
                    if 'code_name' in statistics and 'currupted mainfile' in statistics['code_name']:
                        del statistics['code_name']['currupted mainfile']

                if with_datasets:
                    datasets = results.pop('quantities')['dataset_id']
                    results['datasets'] = datasets

            return results, 200
        except search.ScrollIdNotFound:
            abort(400, 'The given scroll_id does not exist.')
        except KeyError as e:
            import traceback
            traceback.print_exc()
            abort(400, str(e))


query_model_parameters = {
    'owner': fields.String(description='Specify which calcs to return: ``all``, ``public``, ``user``, ``staging``, default is ``all``'),
    'from_time': RFC3339DateTime(description='A yyyy-MM-ddTHH:mm:ss (RFC3339) minimum entry time (e.g. upload time)'),
    'until_time': RFC3339DateTime(description='A yyyy-MM-ddTHH:mm:ss (RFC3339) maximum entry time (e.g. upload time)')
}

for quantity in search.quantities.values():
    if quantity.multi and quantity.argparse_action is None:
        def field(**kwargs):
            return fields.List(fields.String(**kwargs))
    else:
        field = fields.String
    query_model_parameters[quantity.name] = field(description=quantity.description)

repo_query_model = api.model('RepoQuery', query_model_parameters, skip_none=True)


def repo_edit_action_field(quantity):
    if quantity.is_scalar:
        return fields.Nested(repo_edit_action_model, description=quantity.description, skip_none=True)
    else:
        return fields.List(
            fields.Nested(repo_edit_action_model, skip_none=True), description=quantity.description)


repo_edit_action_model = api.model('RepoEditAction', {
    'value': fields.String(description='The value/values that is set as a string.'),
    'success': fields.Boolean(description='If this can/could be done. Only in API response.'),
    'message': fields.String(description='A message that details the action result. Only in API response.')
})

repo_edit_model = api.model('RepoEdit', {
    'verify': fields.Boolean(description='If true, no action is performed.'),
    'query': fields.Nested(repo_query_model, skip_none=True, description='New metadata will be applied to query results.'),
    'actions': fields.Nested(
        api.model('RepoEditActions', {
            quantity.name: repo_edit_action_field(quantity)
            for quantity in UserMetadata.m_def.all_quantities.values()
        }), skip_none=True,
        description='Each action specifies a single value (even for multi valued quantities).')
})
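
# Example (illustrative sketch, not part of the original module): a request body for the
# /repo/edit endpoint could look roughly like this, assuming 'comment' is one of the
# editable UserMetadata quantities:
#
#   {
#       "verify": true,
#       "query": {"upload_id": "some_upload_id"},
#       "actions": {"comment": {"value": "a new comment"}}
#   }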


@ns.route('/edit')
class EditRepoCalcsResource(Resource):
    @api.doc('edit_repo')
    @api.response(400, 'Invalid requests, e.g. wrong owner type or bad search parameters')
    @api.expect(repo_edit_model)
    @api.marshal_with(repo_edit_model, skip_none=True, code=200, description='Edit verified/performed')
    @authenticate()
    @with_logger
    def post(self, logger):
        """ Edit repository metadata. """

        # basic body parsing and some semantic checks
        json_data = request.get_json()
        if json_data is None:
            json_data = {}
        query = json_data.get('query', {})

        owner = query.get('owner', 'user')
        if owner not in ['user', 'staging']:
            abort(400, 'Not a valid owner for edit %s. Edit can only be performed in user or staging' % owner)
        query['owner'] = owner

        if 'actions' not in json_data:
            abort(400, 'Missing key actions in edit data')
        actions = json_data['actions']
        verify = json_data.get('verify', False)

        # checking the edit actions and preparing a mongo update on the fly
        mongo_update = {}
        has_error = False
        for action_quantity_name, quantity_actions in actions.items():
            quantity = UserMetadata.m_def.all_quantities.get(action_quantity_name)
            if quantity is None:
                abort(400, 'Unknown quantity %s' % action_quantity_name)

            quantity_flask = quantity.m_x('flask', {})
            if quantity_flask.get('admin_only', False):
                if not g.user.is_admin():
                    abort(404, 'Only the admin user can set %s' % quantity.name)

            if quantity.name == 'Embargo':
                abort(400, 'Cannot raise an embargo, you can only lift the embargo')

            if isinstance(quantity_actions, list) == quantity.is_scalar:
                abort(400, 'Wrong shape for quantity %s' % action_quantity_name)

            if not isinstance(quantity_actions, list):
                quantity_actions = [quantity_actions]

            flask_verify = quantity_flask.get('verify', None)
            for action in quantity_actions:
                action['success'] = True
                action['message'] = None
                action_value = action['value'].strip()
                if action_value == '':
                    mongo_value = None

                elif flask_verify == datamodel.User:
                    try:
                        mongo_value = User.get(email=action_value).user_id
                    except KeyError:
                        action['success'] = False
                        has_error = True
                        action['message'] = 'User does not exist'
                        continue

                elif flask_verify == datamodel.Dataset:
                    try:
                        mongo_value = Dataset.m_def.m_x('me').get(
                            user_id=g.user.user_id, name=action_value).dataset_id
                    except KeyError:
                        action['message'] = 'Dataset does not exist and will be created'
                        mongo_value = None
                        if not verify:
                            dataset = Dataset(
                                dataset_id=utils.create_uuid(), user_id=g.user.user_id,
                                name=action_value)
                            dataset.m_x('me').create()
                            mongo_value = dataset.dataset_id

                else:
                    mongo_value = action_value

                mongo_key = 'metadata__%s' % quantity.name
                if len(quantity.shape) == 0:
                    mongo_update[mongo_key] = mongo_value
                else:
                    mongo_values = mongo_update.setdefault(mongo_key, [])
                    if mongo_value is not None:
                        mongo_values.append(mongo_value)

        # stop here, if client just wants to verify its actions
        if verify:
            return json_data, 200

        # stop if the actions were not ok
        if has_error:
            return json_data, 400

        # get all calculations that have to change
        parsed_query = {}
        for quantity_name, quantity in search.quantities.items():
            if quantity_name in query:
                value = query[quantity_name]
                if quantity.multi and quantity.argparse_action == 'split' and not isinstance(value, list):
                    value = value.split(',')
                parsed_query[quantity_name] = value
        parsed_query['owner'] = owner

        search_request = search.SearchRequest()
        add_query(search_request, parsed_query)
        calc_ids = list(hit['calc_id'] for hit in search_request.execute_scan())

        # perform the update on the mongo db
        n_updated = proc.Calc.objects(calc_id__in=calc_ids).update(multi=True, **mongo_update)
        if n_updated != len(calc_ids):
            logger.error('edit repo did not update all entries', payload=json_data)

        # re-index the affected entries in elastic search
        def elastic_updates():
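            # Generator of bulk actions: each affected entry is re-serialized from its
            # updated mongo metadata and indexed again, overwriting the old document.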
            for calc in proc.Calc.objects(calc_id__in=calc_ids):
                entry = search.Entry.from_calc_with_metadata(
                    datamodel.CalcWithMetadata(**calc['metadata']))
                entry = entry.to_dict(include_meta=True)
                entry['_op_type'] = 'index'
                yield entry

        _, failed = elasticsearch.helpers.bulk(
            infrastructure.elastic_client, elastic_updates(), stats_only=True)
        search.refresh()
        if failed > 0:
            logger.error(
                'edit repo with failed elastic updates',
                payload=json_data, nfailed=failed)

        return json_data, 200


repo_quantity_model = api.model('RepoQuantity', {
    'after': fields.String(description='The after value that can be used to retrieve the next set of values.'),
    'values': fields.Raw(description='A dict with values as key. Values are dicts with "total" and "examples" keys.')
})

repo_quantity_values_model = api.model('RepoQuantityValues', {
    'quantity': fields.Nested(repo_quantity_model, allow_null=True)
})

repo_quantities_model = api.model('RepoQuantities', {
    'quantities': fields.List(fields.Nested(repo_quantity_model))
})

repo_quantity_search_request_parser = api.parser()
add_common_parameters(repo_quantity_search_request_parser)
repo_quantity_search_request_parser.add_argument(
    'after', type=str, help='The after value to use for "scrolling".')
repo_quantity_search_request_parser.add_argument(
    'size', type=int, help='The max size of the returned values.')
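
# Example (illustrative sketch, not part of the original module; assumes the API is
# mounted under /api and that 'atoms' is an indexed quantity): paging through all
# values of a quantity with the 'after' key:
#
#   import requests
#   after, params = None, {'size': 100}
#   while True:
#       if after is not None:
#           params['after'] = after
#       data = requests.get('http://localhost/api/repo/quantity/atoms', params=params).json()
#       after = data['quantity'].get('after')
#       if after is None:
#           break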


@ns.route('/quantity/<string:quantity>')
class RepoQuantityResource(Resource):
    @api.doc('quantity_search')
    @api.response(400, 'Invalid requests, e.g. wrong owner type, bad quantity, bad search parameters')
    @api.expect(repo_quantity_search_request_parser, validate=True)
    @api.marshal_with(repo_quantity_values_model, skip_none=True, code=200, description='Search results sent')
    @authenticate()
    def get(self, quantity: str):
        """
        Retrieve quantity values from entries matching the search.

        You can use the various quantities to search and filter with. For some of the
        indexed quantities this endpoint returns aggregation information. This means
        you will be given a list of all possible values and the number of entries
        that have a certain value. You can also use these aggregations on an empty
        search to determine the possible values.

        There is no ordering and no pagination. Instead there is an 'after' key based
        scrolling. The result will contain an 'after' value that can be specified
        for the next request. You can use the 'size' and 'after' parameters accordingly.

        The result will contain a 'quantity' key with quantity values and the "after"
        value. There will be up to 'size' many values. For the rest of the values, use the
        "after" parameter in another request.
        """

        search_request = search.SearchRequest()
        args = {
            key: value
            for key, value in repo_quantity_search_request_parser.parse_args().items()
            if value is not None}

        add_query(search_request, args)
        after = args.get('after', None)
        size = args.get('size', 100)

        try:
            assert size >= 0
        except AssertionError:
            abort(400, message='invalid size')

        search_request.quantity(quantity, size=size, after=after)

        try:
            results = search_request.execute()
            quantities = results.pop('quantities')
            results['quantity'] = quantities[quantity]

            return results, 200
        except KeyError as e:
            import traceback
            traceback.print_exc()
            abort(400, 'Given quantity does not exist: %s' % str(e))


repo_quantities_search_request_parser = api.parser()
add_common_parameters(repo_quantities_search_request_parser)
repo_quantities_search_request_parser.add_argument(
    'quantities', type=str, action='append',
    help='The quantities to retrieve values from')
repo_quantities_search_request_parser.add_argument(
    'size', type=int, help='The max size of the returned values.')


@ns.route('/quantities')
class RepoQuantitiesResource(Resource):
    @api.doc('quantities_search')
    @api.response(400, 'Invalid requests, e.g. wrong owner type, bad quantity, bad search parameters')
    @api.expect(repo_quantities_search_request_parser, validate=True)
    @api.marshal_with(repo_quantities_model, skip_none=True, code=200, description='Search results sent')
    @authenticate()
    def get(self):
        """
        Retrieve quantity values for multiple quantities at once.

        You can use the various quantities to search and filter with. For some of the
        indexed quantities this endpoint returns aggregation information. This means
        you will be given a list of all possible values and the number of entries
        that have a certain value. You can also use these aggregations on an empty
        search to determine the possible values.

        There is no ordering, no pagination, and no 'after' key based scrolling. Use the
        'size' parameter to limit the number of values returned per quantity.

        The result will contain a 'quantities' key with a dict of quantity names and the
        retrieved values as values.
        """

        search_request = search.SearchRequest()
        args = {
            key: value
            for key, value in repo_quantities_search_request_parser.parse_args().items()
            if value is not None}

        add_query(search_request, args)
        quantities = args.get('quantities', [])
        size = args.get('size', 5)

        try:
            assert size >= 0
        except AssertionError:
            abort(400, message='invalid size')

        for quantity in quantities:
            try:
                search_request.quantity(quantity, size=size)
            except KeyError as e:
                import traceback
                traceback.print_exc()
                abort(400, 'Given quantity does not exist: %s' % str(e))

        return search_request.execute(), 200


@ns.route('/pid/<int:pid>')
class RepoPidResource(Resource):
    @api.doc('resolve_pid')
    @api.response(404, 'Entry with PID does not exist')
    @api.marshal_with(repo_calc_id_model, skip_none=True, code=200, description='Entry resolved')
    @authenticate()
    def get(self, pid: int):
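        """ Resolve a PID and return the corresponding *upload_id* and *calc_id*. """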
        search_request = search.SearchRequest()

        if g.user is not None:
            search_request.owner('all', user_id=g.user.user_id)
        else:
            search_request.owner('all')

        search_request.search_parameter('pid', pid)

        results = list(search_request.execute_scan())
        total = len(results)

        if total == 0:
            abort(404, 'Entry with PID %d does not exist' % pid)

        if total > 1:
            utils.get_logger(__name__).error('Two entries for the same pid', pid=pid)

        result = results[0]
        return dict(
            upload_id=result['upload_id'],
            calc_id=result['calc_id'])