#
# Copyright The NOMAD Authors.
#
# This file is part of NOMAD. See https://nomad-lab.eu for further info.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

'''
The upload API of the nomad@FAIRDI APIs. Provides endpoints to upload files and
get the processing status of uploads.
'''

from typing import Dict, Any
from flask import g, request, Response
from flask_restplus import Resource, fields, abort
from datetime import datetime
from werkzeug.datastructures import FileStorage
import os.path
import os
import io
from functools import wraps

from nomad import config, utils, files, search, datamodel
from nomad.processing import Upload, FAILURE
from nomad.processing import ProcessAlreadyRunning
from nomad.app import common
from nomad.app.common import RFC3339DateTime

from .api import api
from .auth import authenticate, generate_upload_token
from .common import pagination_request_parser, pagination_model, upload_route, metadata_model


ns = api.namespace(
    'uploads',
    description='Uploading data and tracing uploaded data and its processing.')

proc_model = api.model('Processing', {
    'tasks': fields.List(fields.String),
    'current_task': fields.String,
    'tasks_running': fields.Boolean,
    'tasks_status': fields.String,
    'errors': fields.List(fields.String),
    'warnings': fields.List(fields.String),
    'create_time': RFC3339DateTime,
    'complete_time': RFC3339DateTime,
    'current_process': fields.String,
    'process_running': fields.Boolean,
})
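# Illustrative only, not part of the API definition: an upload's processing state
# marshalled with proc_model might look like this (field values are made up):
#
#   {
#       "tasks": ["uploading", "extracting", "parse_all", "cleanup"],
#       "current_task": "parse_all",
#       "tasks_running": true,
#       "tasks_status": "RUNNING",
#       "errors": [],
#       "warnings": [],
#       "create_time": "2021-03-17T12:00:00Z",
#       "current_process": "process_upload",
#       "process_running": true
#   }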

calc_metadata_model = api.inherit('CalcMetaData', metadata_model, {
    'mainfile': fields.String(description='The calculation\'s main output file. It is used to identify the calculation within the upload.'),
    '_pid': fields.String(description='Assign a specific pid. It must be unique.'),
    'external_id': fields.String(description='External, user-provided id. It does not necessarily have to be unique.')
})

upload_metadata_model = api.inherit('UploadMetaData', metadata_model, {
    'embargo_length': fields.Integer(description='Length of the requested embargo in months.'),
    'calculations': fields.List(fields.Nested(model=calc_metadata_model, skip_none=True), description='Per-calculation data that will override the upload-level data.')
})

upload_model = api.inherit('UploadProcessing', proc_model, {
    'name': fields.String(
        description='The name of the upload. This can be provided during upload '
                    'using the name query parameter.'),
    'upload_id': fields.String(
        description='The unique id for the upload.'),
    # TODO temporarily removed during migration, where this gets particularly large
    # 'metadata': fields.Nested(model=upload_metadata_model, description='Additional upload and calculation meta data.', skip_none=True),
    'upload_path': fields.String(description='The path of the uploaded file on the server.'),
    'published': fields.Boolean(description='Whether this upload is already published.'),
    'upload_time': RFC3339DateTime(),
    'last_status_message': fields.String(description='The last informative message that the processing saved about this upload\'s status.'),
    'published_to': fields.List(fields.String(), description='A list of other NOMAD deployments that this upload has already been uploaded to.')
})

upload_list_model = api.model('UploadList', {
    'pagination': fields.Nested(model=pagination_model, skip_none=True),
    'results': fields.List(fields.Nested(model=upload_model, skip_none=True))
})

calc_model = api.inherit('UploadCalculationProcessing', proc_model, {
    'calc_id': fields.String,
    'mainfile': fields.String,
    'upload_id': fields.String,
    'parser': fields.String,
    'metadata': fields.Raw(
        attribute='_entry_metadata',
        description='The repository metadata for this entry.')
})

upload_with_calcs_model = api.inherit('UploadWithPaginatedCalculations', upload_model, {
    'processed_calcs': fields.Integer,
    'total_calcs': fields.Integer,
    'failed_calcs': fields.Integer,
    'pending_calcs': fields.Integer,
    'calcs': fields.Nested(model=api.model('UploadPaginatedCalculations', {
        'pagination': fields.Nested(model=api.inherit('UploadCalculationPagination', pagination_model, {
            'successes': fields.Integer,
            'failures': fields.Integer,
        }), skip_none=True),
        'results': fields.List(fields.Nested(model=calc_model, skip_none=True))
    }), skip_none=True)
})

upload_operation_model = api.model('UploadOperation', {
    'operation': fields.String(description='The operation to perform: ``publish``, ``re-process``, or ``publish-to-central-nomad``.'),
    'metadata': fields.Nested(model=upload_metadata_model, description='Additional upload and calculation meta data. Will replace previously given metadata.')
})
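# Illustrative only, not part of the API definition: a request body matching
# upload_operation_model for the publish operation might look like this (the
# accepted metadata keys are defined by metadata_model):
#
#   {
#       "operation": "publish",
#       "metadata": {"embargo_length": 12}
#   }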


upload_metadata_parser = api.parser()
upload_metadata_parser.add_argument('name', type=str, help='An optional name for the upload.', location='args')
upload_metadata_parser.add_argument('local_path', type=str, help='Use a local file on the server.', location='args')
upload_metadata_parser.add_argument('token', type=str, help='Upload token to authenticate with curl command.', location='args')
upload_metadata_parser.add_argument('file', type=FileStorage, help='The file to upload.', location='files')
upload_metadata_parser.add_argument('publish_directly', type=bool, help='Set this parameter to publish the upload directly after processing.', location='args')
upload_metadata_parser.add_argument('oasis_upload_id', type=str, help='Use if this is an upload from an OASIS to the central NOMAD and set it to the upload_id.', location='args')
upload_metadata_parser.add_argument('oasis_uploader_id', type=str, help='Use if this is an upload from an OASIS to the central NOMAD and set it to the uploader\'s id.', location='args')
upload_metadata_parser.add_argument('oasis_deployment_id', type=str, help='Use if this is an upload from an OASIS to the central NOMAD and set it to the OASIS\' deployment id.', location='args')


upload_list_parser = pagination_request_parser.copy()
upload_list_parser.add_argument('state', type=str, help='List uploads with given state: all, unpublished, published.', location='args')
upload_list_parser.add_argument('name', type=str, help='Filter for uploads with the given name.', location='args')
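# Illustrative only, not from the original source: with upload_list_parser, a client
# might list its published uploads with, e.g. (authentication elided)
#   curl ".../nomad/api/uploads/?state=published&name=<name>&page=1&per_page=10"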

def disable_marshalling(f):
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except DisableMarshalling as e:
            return e.un_marshalled

    return wrapper


def marshal_with(*args, **kwargs):
    '''
    A special version of the RESTPlus marshal_with decorator that allows disabling
    marshalling at runtime by raising DisableMarshalling.
    '''
    def decorator(func):
        @api.marshal_with(*args, **kwargs)
        def with_marshalling(*args, **kwargs):
            return func(*args, **kwargs)

        @wraps(with_marshalling)
        def wrapper(*args, **kwargs):
            try:
                return with_marshalling(*args, **kwargs)
            except DisableMarshalling as e:
                return e.un_marshalled

        return wrapper
    return decorator


class DisableMarshalling(Exception):
    def __init__(self, body, status, headers):
        super().__init__()
        self.un_marshalled = Response(body, status=status, headers=headers)
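
# Illustrative only, not from the original source: a resource method wrapped with the
# marshal_with decorator defined above can bypass JSON marshalling for selected
# requests by raising DisableMarshalling, e.g.
#
#   @marshal_with(upload_model, skip_none=True, code=200, description='Upload received')
#   def put(self):
#       ...
#       raise DisableMarshalling('plain text response', 200, {'Content-Type': 'text/plain'})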


@ns.route('/')
class UploadListResource(Resource):
    @api.doc('get_uploads')
    @api.response(400, 'Bad parameters')
    @api.marshal_with(upload_list_model, skip_none=True, code=200, description='Uploads sent')
    @api.expect(upload_list_parser)
    @authenticate(required=True)
    def get(self):
        ''' Get the list of all uploads of the authenticated user. '''
        try:
            state = request.args.get('state', 'unpublished')
            name = request.args.get('name', None)
            page = int(request.args.get('page', 1))
            per_page = int(request.args.get('per_page', 10))
        except Exception:
            abort(400, message='bad parameter types')

        try:
            assert page >= 1
            assert per_page > 0
        except AssertionError:
            abort(400, message='invalid pagination')

        query_kwargs = {}
        if state == 'published':
            query_kwargs.update(published=True)
        elif state == 'unpublished':
            query_kwargs.update(published=False)
        elif state == 'all':
            pass
        else:
            abort(400, message='bad state value %s' % state)

        if name is not None:
            query_kwargs.update(name=name)

        uploads = Upload.user_uploads(g.user, **query_kwargs)
        total = uploads.count()

        results = [
            upload
            for upload in uploads.order_by('published', '-upload_time')[(page - 1) * per_page: page * per_page]]

        return dict(
            pagination=dict(total=total, page=page, per_page=per_page),
            results=results), 200

    @api.doc('upload')
    @api.expect(upload_metadata_parser)
    @api.response(400, 'Too many uploads')
    @marshal_with(upload_model, skip_none=True, code=200, description='Upload received')
    @authenticate(required=True, upload_token=True, basic=True)
    def put(self):
        '''
        Upload a file and automatically create a new upload in the process.
        Can be used to upload files via browser or other HTTP clients like curl.
        This will also start the processing of the upload.

        There are two basic ways to upload a file: multipart-formdata or simply streaming
        the file data. Both are supported. The latter does not allow transferring a
        filename or other meta data. If a filename is available, it will become the
        name of the upload.

        Example commands:

            curl -X put ".../nomad/api/uploads/" -F file=@local_file
            curl ".../nomad/api/uploads/" --upload-file local_file

        There is a general limit on how many unpublished uploads a user can have. Will
        return 400 if this limit is exceeded.
        '''
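        # Illustrative only, not from the original source: with an upload token (see
        # /uploads/command) and a name query parameter, a client might call, e.g.
        #   curl ".../nomad/api/uploads/?token=<token>&name=<name>" -T local_file
        # The exact base URL depends on the deployment configuration.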
        # check existence of local_path if local_path is used
        local_path = request.args.get('local_path')
        if local_path:
            if not os.path.exists(local_path):
                abort(404, message='The given local_path was not found.')

        # check the upload limit
        if not g.user.is_admin:
            if Upload.user_uploads(g.user, published=False).count() >= config.services.upload_limit:
                abort(400, 'Limit of unpublished uploads exceeded for user.')

        # check if the upload is to be published directly
        publish_directly = request.args.get('publish_directly') is not None

        # check if allowed to perform oasis upload
        oasis_upload_id = request.args.get('oasis_upload_id')
        oasis_uploader_id = request.args.get('oasis_uploader_id')
        oasis_deployment_id = request.args.get('oasis_deployment_id')
        user = g.user
        from_oasis = oasis_upload_id is not None
        if from_oasis:
            if not g.user.is_oasis_admin:
                abort(401, 'Only an oasis admin can perform an oasis upload.')
            if oasis_uploader_id is None:
                abort(400, 'You must provide the original uploader for an oasis upload.')
            if oasis_deployment_id is None:
                abort(400, 'You must provide the oasis deployment id for an oasis upload.')
            user = datamodel.User.get(user_id=oasis_uploader_id)
            if user is None:
                abort(400, 'The given original uploader does not exist.')
        elif oasis_uploader_id is not None or oasis_deployment_id is not None:
            abort(400, 'For an oasis upload you must provide an oasis_upload_id.')

        upload_name = request.args.get('name')
        if oasis_upload_id is not None:
            upload_id = oasis_upload_id
            try:
                Upload.get(upload_id)
                abort(400, 'An oasis upload with the given upload_id already exists.')
            except KeyError:
                pass
        else:
            upload_id = utils.create_uuid()
        logger = common.logger.bind(upload_id=upload_id, upload_name=upload_name)
        logger.info('upload created')

        try:
            if local_path:
                # file is already there and does not need to be received
                upload_path = local_path
            elif request.mimetype in ['multipart/form-data', 'application/multipart-formdata']:
                logger.info('receive upload as multipart formdata')
                upload_path = files.PathObject(config.fs.tmp, upload_id).os_path
                # multipart formdata, e.g. with curl -X put "url" -F file=@local_file
                # might have performance issues for large files: https://github.com/pallets/flask/issues/2086
                if 'file' not in request.files:
                    abort(400, message='Bad multipart-formdata, there is no file part.')
                file = request.files['file']
                if upload_name is None or upload_name == '':
                    upload_name = file.filename

                file.save(upload_path)
            else:
                # simple streaming data in HTTP body, e.g. with curl "url" -T local_file
                logger.info('started to receive upload streaming data')
                upload_path = files.PathObject(config.fs.tmp, upload_id).os_path

                try:
                    with open(upload_path, 'wb') as f:
                        received_data = 0
                        received_last = 0
                        while True:
                            data = request.stream.read(io.DEFAULT_BUFFER_SIZE)
                            if len(data) == 0:
                                break

                            received_data += len(data)
                            received_last += len(data)
                            if received_last > 1e9:
                                received_last = 0
                                # TODO remove this logging or reduce it to debug
                                logger.info('received streaming data', size=received_data)
                            f.write(data)

                except Exception as e:
                    logger.warning('Error on streaming upload', exc_info=e)
                    abort(400, message='Some IO went wrong, upload probably aborted/disrupted.')
        except Exception as e:
            if not local_path and os.path.isfile(upload_path):
                os.remove(upload_path)
            logger.info('Invalid or aborted upload')
            raise e

        logger.info('received uploaded file')

        upload = Upload.create(
            upload_id=upload_id,
            user=user,
            name=upload_name,
            upload_time=datetime.utcnow(),
            upload_path=upload_path,
            temporary=local_path != upload_path,
            publish_directly=publish_directly or from_oasis,
            from_oasis=from_oasis,
            oasis_deployment_id=oasis_deployment_id)

        upload.process_upload()
        logger.info('initiated processing')

        if bool(request.args.get('token', False)) and request.headers.get('Accept', '') != 'application/json':
            raise DisableMarshalling(
                '''
Thanks for uploading your data to nomad.
Go back to %s and press reload to see the progress on your upload and publish your data.
''' % config.gui_url(),
                200, {'Content-Type': 'text/plain; charset=utf-8'})

        return upload, 200

class ProxyUpload:
    ''' Wraps an upload and adds a ``calcs`` attribute for marshalling paginated calculations. '''
    def __init__(self, upload, calcs):
        self.upload = upload
        self.calcs = calcs

    def __getattr__(self, name):
        return self.upload.__getattribute__(name)


@upload_route(ns)
class UploadResource(Resource):
    @api.doc('get_upload')
    @api.response(404, 'Upload does not exist')
    @api.response(400, 'Invalid parameters')
    @api.marshal_with(upload_with_calcs_model, skip_none=True, code=200, description='Upload sent')
    @api.expect(pagination_request_parser)
    @authenticate(required=True)
    def get(self, upload_id: str):
        '''
        Get an update for an existing upload.

        Will not only return the upload, but also its calculations, paginated.
        Use the pagination params to determine the page.
        '''
        try:
            upload = Upload.get(upload_id)
        except KeyError:
            abort(404, message='Upload with id %s does not exist.' % upload_id)

        if upload.user_id != str(g.user.user_id) and not g.user.is_admin:
            abort(404, message='Upload with id %s does not exist.' % upload_id)

        try:
            page = int(request.args.get('page', 1))
            per_page = int(request.args.get('per_page', 10))
            order_by = request.args.get('order_by', None)
            order = int(str(request.args.get('order', -1)))
        except Exception:
            abort(400, message='invalid pagination or ordering')

        try:
            assert page >= 1
            assert per_page > 0
        except AssertionError:
            abort(400, message='invalid pagination')

        if order_by is not None:
            order_by = str(order_by)
            if order_by not in ['mainfile', 'tasks_status', 'parser']:
                abort(400, message='invalid order_by field %s' % order_by)

            order_by = ('-%s' if order == -1 else '+%s') % order_by

        # load upload's calcs
        calcs = list(upload.all_calcs(
            (page - 1) * per_page, page * per_page, order_by=order_by))

        calc_ids = [calc.calc_id for calc in calcs]
        search_results = {
            hit['calc_id']: hit
            for hit in search.SearchRequest().search_parameter('calc_id', calc_ids).execute_scan()}

        for calc in calcs:
            calc._entry_metadata = search_results.get(calc.calc_id)

        failed_calcs = upload.failed_calcs
        result = ProxyUpload(upload, {
            'pagination': dict(
                total=upload.total_calcs, page=page, per_page=per_page,
                successes=upload.processed_calcs - failed_calcs, failures=failed_calcs),
            'results': calcs
        })

        return result, 200

    @api.doc('delete_upload')
    @api.response(404, 'Upload does not exist')
    @api.response(401, 'Upload does not belong to authenticated user.')
    @api.response(400, 'The upload is still/already processed')
    @api.marshal_with(upload_model, skip_none=True, code=200, description='Upload deleted')
    @authenticate(required=True)
    def delete(self, upload_id: str):
        '''
        Delete an existing upload.

        Only uploads that are still in staging, not already deleted, not still being
        uploaded, and not currently processed, can be deleted.
        '''
        try:
            upload = Upload.get(upload_id)
        except KeyError:
            abort(404, message='Upload with id %s does not exist.' % upload_id)

        if upload.user_id != str(g.user.user_id) and not g.user.is_admin:
            abort(401, message='Upload with id %s does not belong to you.' % upload_id)

        if upload.published and not g.user.is_admin:
            abort(400, message='The upload is already published')

        if upload.tasks_running:
            abort(400, message='The upload is not processed yet')

        try:
            upload.delete_upload()
        except ProcessAlreadyRunning:
            abort(400, message='The upload is still processed')
        except Exception as e:
            common.logger.error('could not delete processing upload', exc_info=e)
            raise e

        return upload, 200

    @api.doc('exec_upload_operation')
    @api.response(404, 'Upload does not exist or not in staging')
    @api.response(400, 'Operation is not supported or the upload is still/already processed')
    @api.response(401, 'If the operation is not allowed for the current user')
    @api.marshal_with(upload_model, skip_none=True, code=200, description='Upload published successfully')
    @api.expect(upload_operation_model)
    @authenticate(required=True)
    def post(self, upload_id):
        '''
        Execute an upload operation. Available operations are ``publish``, ``re-process``,
        and ``publish-to-central-nomad``.

        Publish accepts further meta data that allows providing coauthors, comments,
        external references, etc. See the model for details. The fields that start with
        ``_underscore`` are only available for users with administrative privileges.

        Publish changes the visibility of the upload. Clients can specify the visibility
        via meta data.

        Re-process will re-process the upload and produce updated repository metadata and
        archives. It is only allowed for published uploads that are not processing at the
        moment and whose calculations have been processed with an older NOMAD version.

        Publish-to-central-nomad will transfer the upload to the central NOMAD. This is
        only available on an OASIS. The upload must already be published on the OASIS.
        '''
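        # Illustrative only, not from the original source: a client might trigger the
        # publish operation with, e.g. (authentication elided)
        #   curl -X POST ".../nomad/api/uploads/<upload_id>" \
        #       -H "Content-Type: application/json" \
        #       -d '{"operation": "publish", "metadata": {"embargo_length": 12}}'
        # The accepted metadata keys are defined by upload_metadata_model/metadata_model.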
        try:
            upload = Upload.get(upload_id)
        except KeyError:
            abort(404, message='Upload with id %s does not exist.' % upload_id)

        if upload.user_id != str(g.user.user_id) and not g.user.is_admin:
            abort(404, message='Upload with id %s does not exist.' % upload_id)

        json_data = request.get_json()
        if json_data is None:
            json_data = {}

        operation = json_data.get('operation')

        user_metadata: Dict[str, Any] = json_data.get('metadata', {})
        metadata: Dict[str, Any] = {}
        for user_key in user_metadata:
            if user_key.startswith('_'):
                if not g.user.is_admin:
                    abort(401, message='Only admin users can use _metadata_keys.')

                key = user_key[1:]
            else:
                key = user_key

            metadata[key] = user_metadata[user_key]

        if operation == 'publish':
            if upload.tasks_running:
                abort(400, message='The upload is not processed yet')
            if upload.tasks_status == FAILURE:
                abort(400, message='Cannot publish an upload that failed processing')
            if upload.processed_calcs == 0:
                abort(400, message='Cannot publish an upload without calculations')
            try:
                upload.compress_and_set_metadata(metadata)
                upload.publish_upload()
            except ProcessAlreadyRunning:
                abort(400, message='The upload is still/already processed')

            return upload, 200
        elif operation == 're-process':
            if upload.tasks_running or upload.process_running or not upload.published:
                abort(400, message='Can only re-process published uploads that are not currently processing')

            if len(metadata) > 0:
                abort(400, message='You cannot provide metadata for re-processing')

            if len(upload.outdated_calcs) == 0:
                abort(400, message='You can only re-process uploads with at least one outdated calculation')

            upload.reset()
            upload.re_process_upload()
            return upload, 200
        elif operation == 'publish-to-central-nomad':
            if upload.tasks_running or upload.process_running or not upload.published:
                abort(400, message='Can only upload published uploads that are not currently processing to central NOMAD.')

            if len(metadata) > 0:
                abort(400, message='You cannot provide metadata for publishing to central NOMAD')

            if not config.keycloak.oasis:
                abort(400, message='This operation is only available on a NOMAD OASIS.')

            upload.publish_from_oasis()
            return upload, 200

        abort(400, message='Unsupported operation %s.' % operation)


upload_command_model = api.model('UploadCommand', {
    'upload_url': fields.Url,
    'upload_command': fields.String,
    'upload_command_with_name': fields.String,
    'upload_progress_command': fields.String,
    'upload_command_form': fields.String,
    'upload_tar_command': fields.String
})


@ns.route('/command')
class UploadCommandResource(Resource):
    @api.doc('get_upload_command')
    @api.marshal_with(upload_command_model, code=200, description='Upload command sent')
    @authenticate(required=True)
    def get(self):
        ''' Get the URL and example commands for shell based uploads. '''
        token = generate_upload_token(g.user)
        upload_url = ('%s/uploads/?token=%s' %
                      (config.api_url(ssl=config.services.https_upload), token))
        upload_url_with_name = upload_url + '&name=<name>'

        # upload_command = 'curl -X PUT "%s" -F file=@<local_file>' % upload_url

        # Uploading via streaming data tends to work much easier, e.g. no mime type issues, etc.
        # It is also easier for the user to understand.
        upload_command = 'curl "%s" -T <local_file>' % upload_url

        upload_command_form = 'curl "%s" -X PUT -F file=@<local_file>' % upload_url

        upload_command_with_name = 'curl "%s" -X PUT -T <local_file>' % upload_url_with_name

        upload_progress_command = upload_command + ' | xargs echo'
        upload_tar_command = 'tar -cf - <local_folder> | curl -# "%s" -T - | xargs echo' % upload_url

        return dict(
            upload_url=upload_url,
            upload_command=upload_command,
            upload_command_with_name=upload_command_with_name,
            upload_progress_command=upload_progress_command,
            upload_command_form=upload_command_form,
            upload_tar_command=upload_tar_command), 200