upload.py 24.2 KB
Newer Older
Markus Scheidgen's avatar
Markus Scheidgen committed
1
2
3
4
#
# Copyright The NOMAD Authors.
#
# This file is part of NOMAD. See https://nomad-lab.eu for further info.
5
6
7
8
9
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
Markus Scheidgen's avatar
Markus Scheidgen committed
10
#     http://www.apache.org/licenses/LICENSE-2.0
11
12
#
# Unless required by applicable law or agreed to in writing, software
Markus Scheidgen's avatar
Markus Scheidgen committed
13
# distributed under the License is distributed on an "AS IS" BASIS,
14
15
16
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Markus Scheidgen's avatar
Markus Scheidgen committed
17
#
18

19
'''
20
21
The upload API of the nomad@FAIRDI APIs. Provides endpoints to upload files and
get the processing status of uploads.
22
'''
23

24
from typing import Dict, Any
25
from flask import g, request, Response
26
27
from flask_restplus import Resource, fields, abort
from datetime import datetime
28
29
from werkzeug.datastructures import FileStorage
import os.path
30
import os
31
import io
32
from functools import wraps
33

34
from nomad import config, utils, files, search, datamodel
35
from nomad.processing import Upload, FAILURE
36
from nomad.processing import ProcessAlreadyRunning
37
38
from nomad.app import common
from nomad.app.common import RFC3339DateTime
39

Markus Scheidgen's avatar
Markus Scheidgen committed
40
from .api import api
41
from .auth import authenticate, generate_upload_token
42
from .common import pagination_request_parser, pagination_model, upload_route, metadata_model
43

44

45
# Namespace for all upload related endpoints (/uploads/...).
ns = api.namespace(
    'uploads',
    description='Uploading data and tracing uploaded data and its processing.')

# Common processing state shared by uploads and calculations: task pipeline
# progress, errors/warnings, and timestamps of the underlying Proc documents.
proc_model = api.model('Processing', {
    'tasks': fields.List(fields.String),
    'current_task': fields.String,
    'tasks_running': fields.Boolean,
    'tasks_status': fields.String,
    'errors': fields.List(fields.String),
    'warnings': fields.List(fields.String),
    'create_time': RFC3339DateTime,
    'complete_time': RFC3339DateTime,
    'current_process': fields.String,
    'process_running': fields.Boolean,
})

# Per-calculation metadata that clients may send with the publish operation;
# extends the shared metadata_model. Underscore-prefixed keys are admin-only
# (enforced in UploadResource.post).
calc_metadata_model = api.inherit('CalcMetaData', metadata_model, {
    'mainfile': fields.String(description='The calculation main output file is used to identify the calculation in the upload.'),
    '_pid': fields.String(description='Assign a specific pid. It must be unique.'),
    'external_id': fields.String(description='External user provided id. Does not have to be unique necessarily.')
})

# Upload-level metadata; 'calculations' entries override the upload defaults
# for individual calculations.
upload_metadata_model = api.inherit('UploadMetaData', metadata_model, {
    'embargo_length': fields.Integer(description='Length of the requested embargo in months.'),
    'calculations': fields.List(fields.Nested(model=calc_metadata_model, skip_none=True), description='Specific per calculation data that will override the upload data.')
})

# An upload together with its processing state (inherits proc_model).
upload_model = api.inherit('UploadProcessing', proc_model, {
    'name': fields.String(
        description='The name of the upload. This can be provided during upload '
                    'using the name query parameter.'),
    'upload_id': fields.String(
        description='The unique id for the upload.'),
    # TODO just removed during migration, where this get particularily large
    # 'metadata': fields.Nested(model=upload_metadata_model, description='Additional upload and calculation meta data.', skip_none=True),
    'upload_path': fields.String(description='The uploaded file on the server'),
    'published': fields.Boolean(description='If this upload is already published'),
    'upload_time': RFC3339DateTime(),
})

# Paginated list of uploads, returned by UploadListResource.get.
upload_list_model = api.model('UploadList', {
    'pagination': fields.Nested(model=pagination_model, skip_none=True),
    'results': fields.List(fields.Nested(model=upload_model, skip_none=True))
})

# A single calculation of an upload with its processing state. The 'metadata'
# field is taken from the transient attribute _entry_metadata, which is filled
# from the search index in UploadResource.get.
calc_model = api.inherit('UploadCalculationProcessing', proc_model, {
    'calc_id': fields.String,
    'mainfile': fields.String,
    'upload_id': fields.String,
    'parser': fields.String,
    'metadata': fields.Raw(
        attribute='_entry_metadata',
        description='The repository metadata for this entry.')
})

# Upload detail view: the upload plus calculation counters and one page of
# its calculations (marshalled from a ProxyUpload instance).
upload_with_calcs_model = api.inherit('UploadWithPaginatedCalculations', upload_model, {
    'processed_calcs': fields.Integer,
    'total_calcs': fields.Integer,
    'failed_calcs': fields.Integer,
    'pending_calcs': fields.Integer,
    'calcs': fields.Nested(model=api.model('UploadPaginatedCalculations', {
        'pagination': fields.Nested(model=api.inherit('UploadCalculationPagination', pagination_model, {
            'successes': fields.Integer,
            'failures': fields.Integer,
        }), skip_none=True),
        'results': fields.List(fields.Nested(model=calc_model, skip_none=True))
    }), skip_none=True)
})

# Request body for POST on an upload; 'operation' selects publish/re-process.
upload_operation_model = api.model('UploadOperation', {
    'operation': fields.String(description='Currently publish is the only operation.'),
    'metadata': fields.Nested(model=upload_metadata_model, description='Additional upload and calculation meta data. Will replace previously given metadata.')
})


# Query/file arguments accepted by PUT /uploads/ (see UploadListResource.put).
# The oasis_* arguments are only valid together and only for OASIS admins.
upload_metadata_parser = api.parser()
upload_metadata_parser.add_argument('name', type=str, help='An optional name for the upload.', location='args')
upload_metadata_parser.add_argument('local_path', type=str, help='Use a local file on the server.', location='args')
upload_metadata_parser.add_argument('token', type=str, help='Upload token to authenticate with curl command.', location='args')
upload_metadata_parser.add_argument('file', type=FileStorage, help='The file to upload.', location='files')
upload_metadata_parser.add_argument('oasis_upload_id', type=str, help='Use if this is an upload from an OASIS to the central NOMAD and set it to the upload_id.', location='args')
upload_metadata_parser.add_argument('oasis_uploader_id', type=str, help='Use if this is an upload from an OASIS to the central NOMAD and set it to the uploader\' id.', location='args')
upload_metadata_parser.add_argument('oasis_deployment_id', type=str, help='Use if this is an upload from an OASIS to the central NOMAD and set it to the OASIS\' deployment id.', location='args')


# Query arguments accepted by GET /uploads/ (pagination plus filters).
upload_list_parser = pagination_request_parser.copy()
upload_list_parser.add_argument('state', type=str, help='List uploads with given state: all, unpublished, published.', location='args')
upload_list_parser.add_argument('name', type=str, help='Filter for uploads with the given name.', location='args')

135

136
137
138
139
140
141
142
143
144
145
146
147
def disable_marshalling(f):
    '''
    Decorator that lets an endpoint opt out of RESTPlus marshalling at runtime:
    if the wrapped function raises DisableMarshalling, the pre-built raw flask
    Response carried by the exception is returned instead of the regular result.
    '''
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            result = f(*args, **kwargs)
        except DisableMarshalling as e:
            # short-circuit with the un-marshalled response
            return e.un_marshalled
        return result

    return wrapper


def marshal_with(*args, **kwargs):
    '''
    A special version of the RESTPlus marshal_with decorator that allows to disable
    marshalling at runtime by raising DisableMarshalling.

    All arguments are forwarded unchanged to ``api.marshal_with``.
    '''
    def decorator(func):
        # apply the regular RESTPlus marshalling to a thin shim around func
        @api.marshal_with(*args, **kwargs)
        def with_marshalling(*call_args, **call_kwargs):
            return func(*call_args, **call_kwargs)

        @wraps(with_marshalling)
        def wrapper(*call_args, **call_kwargs):
            try:
                return with_marshalling(*call_args, **call_kwargs)
            except DisableMarshalling as e:
                # bypass marshalling and return the raw flask Response
                return e.un_marshalled

        return wrapper
    return decorator


class DisableMarshalling(Exception):
    '''
    Raised from within a request handler to skip RESTPlus marshalling.
    Carries the ready-made raw flask Response that should be sent instead.
    '''
    def __init__(self, body, status, headers):
        super().__init__()
        # the plain response returned by the marshal_with/disable_marshalling wrappers
        self.un_marshalled = Response(body, status=status, headers=headers)


174
@ns.route('/')
class UploadListResource(Resource):
    @api.doc('get_uploads')
    @api.response(400, 'Bad parameters')
    @api.marshal_with(upload_list_model, skip_none=True, code=200, description='Uploads send')
    @api.expect(upload_list_parser)
    @authenticate(required=True)
    def get(self):
        ''' Get the list of all uploads from the authenticated user. '''
        # parse and type-check the query parameters
        try:
            state = request.args.get('state', 'unpublished')
            name = request.args.get('name', None)
            page = int(request.args.get('page', 1))
            per_page = int(request.args.get('per_page', 10))
        except Exception:
            abort(400, message='bad parameter types')

        try:
            assert page >= 1
            assert per_page > 0
        except AssertionError:
            abort(400, message='invalid pagination')

        # translate the state filter into query kwargs
        query_kwargs = {}
        if state == 'published':
            query_kwargs.update(published=True)
        elif state == 'unpublished':
            query_kwargs.update(published=False)
        elif state == 'all':
            pass
        else:
            abort(400, message='bad state value %s' % state)

        if name is not None:
            query_kwargs.update(name=name)

        uploads = Upload.user_uploads(g.user, **query_kwargs)
        total = uploads.count()

        # unpublished uploads first, newest first within each group
        results = [
            upload
            for upload in uploads.order_by('published', '-upload_time')[(page - 1) * per_page: page * per_page]]

        return dict(
            pagination=dict(total=total, page=page, per_page=per_page),
            results=results), 200

    @api.doc('upload')
    @api.expect(upload_metadata_parser)
    @api.response(400, 'Too many uploads')
    @marshal_with(upload_model, skip_none=True, code=200, description='Upload received')
    @authenticate(required=True, upload_token=True)
    def put(self):
        '''
        Upload a file and automatically create a new upload in the process.
        Can be used to upload files via browser or other http clients like curl.
        This will also start the processing of the upload.

        There are two basic ways to upload a file: multipart-formdata or simply streaming
        the file data. Both are supported. The later one does not allow to transfer a
        filename or other meta-data. If a filename is available, it will become the
        name of the upload.

        Example commands:

            curl -X put ".../nomad/api/uploads/" -F file=@local_file
            curl ".../nomad/api/uploads/" --upload-file local_file

        There is a general limit on how many unpublished uploads a user can have. Will
        return 400 if this limit is exceeded.
        '''
        # check existence of local_path if local_path is used
        local_path = request.args.get('local_path')
        if local_path:
            if not os.path.exists(local_path):
                abort(404, message='The given local_path was not found.')

        # check the upload limit
        if not g.user.is_admin:
            if Upload.user_uploads(g.user, published=False).count() >= config.services.upload_limit:
                abort(400, 'Limit of unpublished uploads exceeded for user.')

        # check if allowed to perform oasis upload; an OASIS upload requires all
        # three oasis_* parameters and an oasis admin as the authenticated user
        oasis_upload_id = request.args.get('oasis_upload_id')
        oasis_uploader_id = request.args.get('oasis_uploader_id')
        oasis_deployment_id = request.args.get('oasis_deployment_id')
        user = g.user
        from_oasis = oasis_upload_id is not None
        if from_oasis:
            if not g.user.is_oasis_admin:
                abort(401, 'Only an oasis admin can perform an oasis upload.')
            if oasis_uploader_id is None:
                abort(400, 'You must provide the original uploader for an oasis upload.')
            if oasis_deployment_id is None:
                abort(400, 'You must provide the oasis deployment id for an oasis upload.')
            # the upload is attributed to the original uploader, not the oasis admin
            user = datamodel.User.get(user_id=oasis_uploader_id)
            if user is None:
                abort(400, 'The given original uploader does not exist.')
        elif oasis_uploader_id is not None or oasis_deployment_id is not None:
            abort(400, 'For an oasis upload you must provide an oasis_upload_id.')

        upload_name = request.args.get('name')
        # OASIS uploads keep their original upload_id; it must not clash
        if oasis_upload_id is not None:
            upload_id = oasis_upload_id
            try:
                Upload.get(upload_id)
                abort(400, 'An oasis upload with the given upload_id already exists.')
            except KeyError:
                pass
        else:
            upload_id = utils.create_uuid()

        logger = common.logger.bind(upload_id=upload_id, upload_name=upload_name)
        logger.info('upload created')

        try:
            if local_path:
                # file is already there and does not need to be received
                upload_path = local_path
            elif request.mimetype in ['multipart/form-data', 'application/multipart-formdata']:
                logger.info('receive upload as multipart formdata')
                upload_path = files.PathObject(config.fs.tmp, upload_id).os_path

                # multipart formdata, e.g. with curl -X put "url" -F file=@local_file
                # might have performance issues for large files: https://github.com/pallets/flask/issues/2086
                if 'file' not in request.files:
                    abort(400, message='Bad multipart-formdata, there is no file part.')
                file = request.files['file']
                # fall back to the transmitted filename as upload name
                # (was `upload_name is ''`: identity comparison with a literal is
                # unreliable and raises SyntaxWarning; use a falsy check instead)
                if not upload_name:
                    upload_name = file.filename

                file.save(upload_path)
            else:
                # simple streaming data in HTTP body, e.g. with curl "url" -T local_file
                logger.info('started to receive upload streaming data')
                upload_path = files.PathObject(config.fs.tmp, upload_id).os_path

                try:
                    with open(upload_path, 'wb') as f:
                        received_data = 0
                        received_last = 0
                        while True:
                            data = request.stream.read(io.DEFAULT_BUFFER_SIZE)
                            if len(data) == 0:
                                break

                            received_data += len(data)
                            received_last += len(data)
                            # log at most once per received GB
                            if received_last > 1e9:
                                received_last = 0
                                # TODO remove this logging or reduce it to debug
                                logger.info('received streaming data', size=received_data)
                            f.write(data)

                except Exception as e:
                    logger.warning('Error on streaming upload', exc_info=e)
                    abort(400, message='Some IO went wrong, download probably aborted/disrupted.')
        except Exception as e:
            # clean up the partially received file, but never delete a local_path
            if not local_path and os.path.isfile(upload_path):
                os.remove(upload_path)
            logger.info('Invalid or aborted upload')
            raise e

        logger.info('received uploaded file')

        upload = Upload.create(
            upload_id=upload_id,
            user=user,
            name=upload_name,
            upload_time=datetime.utcnow(),
            upload_path=upload_path,
            temporary=local_path != upload_path,
            from_oasis=from_oasis,
            oasis_deployment_id=oasis_deployment_id)

        upload.process_upload()
        logger.info('initiated processing')

        # curl token uploads without Accept: application/json get a friendly
        # plain text message instead of the marshalled upload
        if bool(request.args.get('token', False)) and request.headers.get('Accept', '') != 'application/json':
            raise DisableMarshalling(
                '''
Thanks for uploading your data to nomad.
Go back to %s and press reload to see the progress on your upload and publish your data.

''' % config.gui_url(),
                200, {'Content-Type': 'text/plain; charset=utf-8'})

        return upload, 200
361

Markus Scheidgen's avatar
Markus Scheidgen committed
362

363
364
365
366
class ProxyUpload:
    '''
    Pairs an upload with a paginated ``calcs`` dict so both can be marshalled
    together with ``upload_with_calcs_model``. Any attribute not found on the
    proxy itself is looked up on the wrapped upload.
    '''

    def __init__(self, upload, calcs):
        self.calcs = calcs
        self.upload = upload

    def __getattr__(self, name):
        # only called for names missing on the proxy; forward to the upload
        return self.upload.__getattribute__(name)


372
@upload_route(ns)
class UploadResource(Resource):
    @api.doc('get_upload')
    @api.response(404, 'Upload does not exist')
    @api.response(400, 'Invalid parameters')
    @api.marshal_with(upload_with_calcs_model, skip_none=True, code=200, description='Upload send')
    @api.expect(pagination_request_parser)
    @authenticate(required=True)
    def get(self, upload_id: str):
        '''
        Get an update for an existing upload.

        Will not only return the upload, but also its calculations paginated.
        Use the pagination params to determine the page.
        '''
        try:
            upload = Upload.get(upload_id)
        except KeyError:
            abort(404, message='Upload with id %s does not exist.' % upload_id)

        # report 404 (not 401) for other users' uploads to not leak existence
        if upload.user_id != str(g.user.user_id) and not g.user.is_admin:
            abort(404, message='Upload with id %s does not exist.' % upload_id)

        # parse pagination/ordering query parameters
        try:
            page = int(request.args.get('page', 1))
            per_page = int(request.args.get('per_page', 10))
            order_by = request.args.get('order_by', None)
            order = int(str(request.args.get('order', -1)))
        except Exception:
            abort(400, message='invalid pagination or ordering')

        try:
            assert page >= 1
            assert per_page > 0
        except AssertionError:
            abort(400, message='invalid pagination')

        if order_by is not None:
            order_by = str(order_by)
            if order_by not in ['mainfile', 'tasks_status', 'parser']:
                abort(400, message='invalid order_by field %s' % order_by)

            # -1 means descending; any other int value sorts ascending
            order_by = ('-%s' if order == -1 else '+%s') % order_by

        # load upload's calcs
        calcs = list(upload.all_calcs(
            (page - 1) * per_page, page * per_page, order_by=order_by))

        # enrich each calc with its repository metadata from the search index
        calc_ids = [calc.calc_id for calc in calcs]
        search_results = {
            hit['calc_id']: hit
            for hit in search.SearchRequest().search_parameter('calc_id', calc_ids).execute_scan()}

        for calc in calcs:
            # _entry_metadata feeds the 'metadata' field of calc_model
            calc._entry_metadata = search_results.get(calc.calc_id)

        failed_calcs = upload.failed_calcs
        # wrap upload + paginated calcs so both marshal via upload_with_calcs_model
        result = ProxyUpload(upload, {
            'pagination': dict(
                total=upload.total_calcs, page=page, per_page=per_page,
                successes=upload.processed_calcs - failed_calcs, failures=failed_calcs),
            'results': calcs
        })

        return result, 200

    @api.doc('delete_upload')
    @api.response(404, 'Upload does not exist')
    @api.response(401, 'Upload does not belong to authenticated user.')
    @api.response(400, 'The upload is still/already processed')
    @api.marshal_with(upload_model, skip_none=True, code=200, description='Upload deleted')
    @authenticate(required=True)
    def delete(self, upload_id: str):
        '''
        Delete an existing upload.

        Only uploads that are still in staging, not already deleted, not still uploaded, and
        not currently processed, can be deleted.
        '''
        try:
            upload = Upload.get(upload_id)
        except KeyError:
            abort(404, message='Upload with id %s does not exist.' % upload_id)

        if upload.user_id != str(g.user.user_id) and not g.user.is_admin:
            abort(401, message='Upload with id %s does not belong to you.' % upload_id)

        # only admins may delete published uploads
        if upload.published and not g.user.is_admin:
            abort(400, message='The upload is already published')

        if upload.tasks_running:
            abort(400, message='The upload is not processed yet')

        try:
            upload.delete_upload()
        except ProcessAlreadyRunning:
            abort(400, message='The upload is still processed')
        except Exception as e:
            common.logger.error('could not delete processing upload', exc_info=e)
            raise e

        return upload, 200

    @api.doc('exec_upload_operation')
    @api.response(404, 'Upload does not exist or not in staging')
    @api.response(400, 'Operation is not supported or the upload is still/already processed')
    @api.response(401, 'If the operation is not allowed for the current user')
    @api.marshal_with(upload_model, skip_none=True, code=200, description='Upload published successfully')
    @api.expect(upload_operation_model)
    @authenticate(required=True)
    def post(self, upload_id):
        '''
        Execute an upload operation. Available operations are ``publish`` and ``re-process``

        Publish accepts further meta data that allows to provide coauthors, comments,
        external references, etc. See the model for details. The fields that start with
        ``_underscore`` are only available for users with administrative privileges.

        Publish changes the visibility of the upload. Clients can specify the visibility
        via meta data.

        Re-process will re-process the upload and produce updated repository metadata and
        archive. Only published uploads that are not processing at the moment are allowed.
        Only for uploads where calculations have been processed with an older nomad version.
        '''
        try:
            upload = Upload.get(upload_id)
        except KeyError:
            abort(404, message='Upload with id %s does not exist.' % upload_id)

        # report 404 (not 401) for other users' uploads to not leak existence
        if upload.user_id != str(g.user.user_id) and not g.user.is_admin:
            abort(404, message='Upload with id %s does not exist.' % upload_id)

        json_data = request.get_json()
        if json_data is None:
            json_data = {}

        operation = json_data.get('operation')

        # strip the admin-only '_' prefix from metadata keys; non-admins are
        # rejected as soon as they use any underscore key
        user_metadata: Dict[str, Any] = json_data.get('metadata', {})
        metadata: Dict[str, Any] = {}
        for user_key in user_metadata:
            if user_key.startswith('_'):
                if not g.user.is_admin:
                    abort(401, message='Only admin users can use _metadata_keys.')

                key = user_key[1:]
            else:
                key = user_key

            metadata[key] = user_metadata[user_key]

        if operation == 'publish':
            # publishing requires a fully and successfully processed upload
            if upload.tasks_running:
                abort(400, message='The upload is not processed yet')
            if upload.tasks_status == FAILURE:
                abort(400, message='Cannot publish an upload that failed processing')
            if upload.processed_calcs == 0:
                abort(400, message='Cannot publish an upload without calculations')
            try:
                upload.compress_and_set_metadata(metadata)
                upload.publish_upload()
            except ProcessAlreadyRunning:
                abort(400, message='The upload is still/already processed')

            return upload, 200
        elif operation == 're-process':
            # only idle, published uploads with outdated calcs can be re-processed
            if upload.tasks_running or upload.process_running or not upload.published:
                abort(400, message='Can only non processing, re-process published uploads')

            if len(metadata) > 0:
                abort(400, message='You can not provide metadata for re-processing')

            if len(upload.outdated_calcs) == 0:
                abort(400, message='You can only re-process uploads with at least one outdated calculation')

            upload.reset()
            upload.re_process_upload()

            return upload, 200

        abort(400, message='Unsupported operation %s.' % operation)
554
555
556
557


# Response model for /uploads/command: ready-made shell commands that use the
# authenticated user's upload token.
upload_command_model = api.model('UploadCommand', {
    'upload_url': fields.Url,
    'upload_command': fields.String,
    'upload_command_with_name': fields.String,
    'upload_progress_command': fields.String,
    'upload_command_form': fields.String,
    'upload_tar_command': fields.String
})


@ns.route('/command')
class UploadCommandResource(Resource):
    @api.doc('get_upload_command')
    @api.marshal_with(upload_command_model, code=200, description='Upload command send')
    @authenticate(required=True)
    def get(self):
        ''' Get url and example command for shell based uploads. '''
        token = generate_upload_token(g.user)
        # the token authenticates the user for curl-based uploads
        upload_url = '%s/uploads/?token=%s' % (config.api_url(ssl=False), token)
        upload_url_with_name = upload_url + '&name=<name>'

        # upload_command = 'curl -X PUT "%s" -F file=@<local_file>' % upload_url

        # Upload via streaming data tends to work much easier, e.g. no mime type issues, etc.
        # It is also easier for the user to understand IMHO.
        upload_command = 'curl "%s" -T <local_file>' % upload_url

        upload_command_form = 'curl "%s" -X PUT -F file=@<local_file>' % upload_url

        upload_command_with_name = 'curl "%s" -X PUT -T <local_file>' % upload_url_with_name

        upload_progress_command = upload_command + ' | xargs echo'
        # fix: the URL must be curl's positional argument; the previous
        # `-H "%s"` passed it as an HTTP header value and left curl without a URL
        upload_tar_command = 'tar -cf - <local_folder> | curl -# "%s" -T - | xargs echo' % upload_url

        return dict(
            upload_url=upload_url,
            upload_command=upload_command,
            upload_command_with_name=upload_command_with_name,
            upload_progress_command=upload_progress_command,
            upload_command_form=upload_command_form,
            upload_tar_command=upload_tar_command), 200