diff --git a/docs/api.md b/docs/api.md
index 04c5f5d2774b00c709edbec37bf3d6117427b9cf..291e4bf9f79dcf0018ecf6de7526efc111064dee 100644
--- a/docs/api.md
+++ b/docs/api.md
@@ -256,7 +256,7 @@ are:
 
 - `entries/query` - Query entries for metadata
 - `entries/archive/query` - Query entries for archive data
-- `entries/{entry-id}/raw/download` - Download raw data for a specific entry
+- `entries/{entry-id}/raw` - Download raw data for a specific entry
 - `uploads/{upload-id}/raw/path/to/file` - Download a specific file of an upload
 
 ## Common concepts
@@ -392,7 +392,7 @@ files in one big zip-file. Here, you might want to use a program like *curl* to
 directly from the shell:
 
 ```
-curl "{{ nomad_url() }}/v1/entries/raw/download?results.material.elements=Ti&results.material.elements=O" -o download.zip
+curl "{{ nomad_url() }}/v1/entries/raw?results.material.elements=Ti&results.material.elements=O" -o download.zip
 ```
 
 ## Access archives
diff --git a/gui/src/components/api.js b/gui/src/components/api.js
index 536016331601835c6a7c97436c70c10e5021f208..a32cbac1be3de35a786db4a86630f7ed9a39c502 100644
--- a/gui/src/components/api.js
+++ b/gui/src/components/api.js
@@ -197,7 +197,7 @@ class Api {
     this.onStartLoading()
     const auth = await this.authHeaders()
     try {
-      const entry = await this.axios.get(`/entries/${entryId}/raw`, auth)
+      const entry = await this.axios.get(`/entries/${entryId}/rawdir`, auth)
       return entry.data
     } catch (errors) {
       handleApiError(errors)
diff --git a/gui/src/components/entry/RawFiles.js b/gui/src/components/entry/RawFiles.js
index 2899ff986450b9d2ead8e9f46836e70f9a3aac98..aae2eccab826067e191a954de338e6f8c2b1487d 100644
--- a/gui/src/components/entry/RawFiles.js
+++ b/gui/src/components/entry/RawFiles.js
@@ -153,7 +153,7 @@ export default function RawFiles({data, entryId}) {
     setShownFile(file)
     setFileContents(null)
     api.get(
-      `/entries/${entryId}/raw/download/${file.split('/').reverse()[0]}`,
+      `/entries/${entryId}/raw/${file.split('/').reverse()[0]}`,
      {length: 16 * 1024, decompress: true},
      {transformResponse: []})
       .then(contents => setFileContents({
@@ -174,7 +174,7 @@ export default function RawFiles({data, entryId}) {
 
     if (fileContents.contents.length < (page + 1) * 16 * 1024) {
       api.get(
-        `/entries/${entryId}/raw/download/${shownFile.split('/').reverse()[0]}`,
+        `/entries/${entryId}/raw/${shownFile.split('/').reverse()[0]}`,
        {offset: page * 16 * 1024, length: 16 * 1024, decompress: true},
        {transformResponse: []})
        .then(contents => {
@@ -220,14 +220,14 @@ export default function RawFiles({data, entryId}) {
     let downloadUrl
     if (selectedFiles.length === 1) {
       // download the individual file
-      downloadUrl = `entries/${entryId}/raw/download/${file(selectedFiles[0])}`
+      downloadUrl = `entries/${entryId}/raw/${file(selectedFiles[0])}`
     } else if (selectedFiles.length === availableFiles.length) {
       // use an endpoint that downloads all files of the entry
-      downloadUrl = `entries/${entryId}/raw/download`
+      downloadUrl = `entries/${entryId}/raw`
     } else if (selectedFiles.length > 0) {
       // download specific files
       const query = selectedFiles.map(file).map(f => `include_files=${encodeURIComponent(f)}`).join('&')
-      downloadUrl = `entries/${entryId}/raw/download?${query}`
+      downloadUrl = `entries/${entryId}/raw?${query}`
     }
 
     return (
diff --git a/nomad/app/flask/dcat/mapping.py b/nomad/app/flask/dcat/mapping.py
index 8d7acafb34240f6285ec7e87137f603275d73a9c..dd2ed965f90246d4f117dd179eb975c2e9c00df8 100644
--- a/nomad/app/flask/dcat/mapping.py
+++ b/nomad/app/flask/dcat/mapping.py
@@ -190,7 +190,7 @@ class Mapping():
         dist = BNode()
         self.g.add((dist, RDF.type, DCAT.Distribution))
         self.g.add((dist, DCT.title, Literal(get_optional_entry_prop(entry, 'formula') + '_raw')))
-        self.g.add((dist, DCAT.accessURL, URIRef(f'https://nomad-lab.eu/prod/rae/api/v1/entries/{entry["entry_id"]}/raw/download')))
+        self.g.add((dist, DCAT.accessURL, URIRef(f'https://nomad-lab.eu/prod/rae/api/v1/entries/{entry["entry_id"]}/raw')))
         self.g.add((dist, DCAT.packageFormat, URIRef('https://www.iana.org/assignments/media-types/application/zip')))
 
         return dist
diff --git a/nomad/app/v1/routers/entries.py b/nomad/app/v1/routers/entries.py
index c40e73fb4cff4bee23df005220338bda69805487..a3bf7bdac4a32ddcd0078e7dafc4862bd4ad786b 100644
--- a/nomad/app/v1/routers/entries.py
+++ b/nomad/app/v1/routers/entries.py
@@ -180,11 +180,11 @@ class EntriesArchiveDownload(WithQuery):
     files: Optional[Files] = Body(None)
 
 
-class EntriesRaw(WithQuery):
+class EntriesRawDir(WithQuery):
     pagination: Optional[MetadataPagination] = Body(None)
 
 
-class EntriesRawDownload(WithQuery):
+class EntriesRaw(WithQuery):
     files: Optional[Files] = Body(
         None,
         example={
@@ -192,26 +192,26 @@ class EntriesRawDownload(WithQuery):
         })
 
 
-class EntryRawFile(BaseModel):
+class EntryRawDirFile(BaseModel):
     path: str = Field(None)
     size: int = Field(None)
 
 
-class EntryRaw(BaseModel):
+class EntryRawDir(BaseModel):
     entry_id: str = Field(None)
     upload_id: str = Field(None)
     mainfile: str = Field(None)
-    files: List[EntryRawFile] = Field(None)
+    files: List[EntryRawDirFile] = Field(None)
 
 
-class EntriesRawResponse(EntriesRaw):
+class EntriesRawDirResponse(EntriesRawDir):
     pagination: PaginationResponse = Field(None)  # type: ignore
-    data: List[EntryRaw] = Field(None)
+    data: List[EntryRawDir] = Field(None)
 
 
-class EntryRawResponse(BaseModel):
+class EntryRawDirResponse(BaseModel):
     entry_id: str = Field(...)
-    data: EntryRaw = Field(...)
+    data: EntryRawDir = Field(...)
 
 
 class EntryArchive(BaseModel):
@@ -296,14 +296,14 @@ _bad_edit_request_empty_query = status.HTTP_404_NOT_FOUND, {
     'model': HTTPExceptionModel,
     'description': strip('No matching entries found.')}
 
-_raw_download_response = 200, {
+_raw_response = 200, {
     'content': {'application/zip': {}},
     'description': strip('''
         A zip file with the requested raw files. The file is streamed.
        The content length is not known in advance.
    ''')}
 
-_raw_download_file_response = 200, {
+_raw_file_response = 200, {
     'content': {'application/octet-stream': {}},
     'description': strip('''
         A byte stream with raw file contents. The content length is not known in advance.
@@ -462,7 +462,7 @@ class _Uploads():
         self._upload_files.close()
 
 
-def _create_entry_raw(entry_metadata: Dict[str, Any], uploads: _Uploads):
+def _create_entry_rawdir(entry_metadata: Dict[str, Any], uploads: _Uploads):
     entry_id = entry_metadata['entry_id']
     upload_id = entry_metadata['upload_id']
     mainfile = entry_metadata['mainfile']
@@ -472,12 +472,12 @@ def _create_entry_raw(entry_metadata: Dict[str, Any], uploads: _Uploads):
 
     files = []
     for path_info in upload_files.raw_directory_list(mainfile_dir, files_only=True):
-        files.append(EntryRawFile(path=path_info.path, size=path_info.size))
+        files.append(EntryRawDirFile(path=path_info.path, size=path_info.size))
 
-    return EntryRaw(entry_id=entry_id, upload_id=upload_id, mainfile=mainfile, files=files)
+    return EntryRawDir(entry_id=entry_id, upload_id=upload_id, mainfile=mainfile, files=files)
 
 
-def _answer_entries_raw_request(
+def _answer_entries_rawdir_request(
         owner: Owner, query: Query, pagination: MetadataPagination, user: User):
 
     if owner == Owner.all_:
@@ -495,19 +495,19 @@ def _answer_entries_raw_request(
     uploads = _Uploads()
     try:
         response_data = [
-            _create_entry_raw(entry_metadata, uploads)
+            _create_entry_rawdir(entry_metadata, uploads)
             for entry_metadata in search_response.data]
     finally:
         uploads.close()
 
-    return EntriesRawResponse(
+    return EntriesRawDirResponse(
         owner=search_response.owner,
         query=search_response.query,
         pagination=search_response.pagination,
         data=response_data)
 
 
-def _answer_entries_raw_download_request(owner: Owner, query: Query, files: Files, user: User):
+def _answer_entries_raw_request(owner: Owner, query: Query, files: Files, user: User):
     if owner == Owner.all_:
         raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=strip('''
             The owner=all is not allowed for this operation as it will search for entries
@@ -558,16 +558,15 @@ def _answer_entries_raw_download_request(owner: Owner, query: Query, files: File
         raise
 
 
-_entries_raw_query_docstring = strip('''
+_entries_rawdir_query_docstring = strip('''
     Will perform a search and return a *page* of raw file metadata for entries fulfilling
     the query. This allows you to get a complete list of all rawfiles with their full
     path in their respective upload and their sizes. The first returned files for each
     entry, is their respective *mainfile*.
 
-    Each entry on NOMAD represents a set of raw files. These are the input and output
-    files (as well as additional auxiliary files) in their original form, i.e. as
-    provided by the uploader. More specifically, an entry represents a code-run identified
-    by a certain *mainfile*. This is usually the main output file of the code. All other
+    Each entry on NOMAD has a set of raw files. These are the files in their original form,
+    i.e. as provided by the uploader. More specifically, an entry has a *mainfile*, identified as
+    parseable. For CMS entries, the mainfile is usually the main output file of the code. All other
     files in the same directory are considered the entries *auxiliary* no matter their role
     or if they were actually parsed by NOMAD.
 
@@ -576,50 +575,49 @@ _entries_raw_query_docstring = strip('''
 
 
 @router.post(
-    '/raw/query',
+    '/rawdir/query',
     tags=[raw_tag],
     summary='Search entries and get their raw files metadata',
-    description=_entries_raw_query_docstring,
-    response_model=EntriesRawResponse,
+    description=_entries_rawdir_query_docstring,
+    response_model=EntriesRawDirResponse,
     responses=create_responses(_bad_owner_response),
     response_model_exclude_unset=True,
     response_model_exclude_none=True)
-async def post_entries_raw_query(
-        request: Request, data: EntriesRaw, user: User = Depends(create_user_dependency())):
+async def post_entries_rawdir_query(
+        request: Request, data: EntriesRawDir, user: User = Depends(create_user_dependency())):
 
-    return _answer_entries_raw_request(
+    return _answer_entries_rawdir_request(
         owner=data.owner, query=data.query, pagination=data.pagination, user=user)
 
 
 @router.get(
-    '/raw',
+    '/rawdir',
     tags=[raw_tag],
-    summary='Search entries and get raw their raw files metadata',
-    description=_entries_raw_query_docstring,
-    response_model=EntriesRawResponse,
+    summary='Search entries and get their raw files metadata',
+    description=_entries_rawdir_query_docstring,
+    response_model=EntriesRawDirResponse,
     response_model_exclude_unset=True,
     response_model_exclude_none=True,
     responses=create_responses(_bad_owner_response))
-async def get_entries_raw(
+async def get_entries_rawdir(
         request: Request,
         with_query: WithQuery = Depends(query_parameters),
         pagination: MetadataPagination = Depends(metadata_pagination_parameters),
         user: User = Depends(create_user_dependency())):
 
-    res = _answer_entries_raw_request(
+    res = _answer_entries_rawdir_request(
         owner=with_query.owner, query=with_query.query, pagination=pagination, user=user)
     res.pagination.populate_urls(request)
     return res
 
 
-_entries_raw_download_query_docstring = strip('''
-    This operation will perform a search and stream a .zip file with raw input and output
-    files of the found entries.
+_entries_raw_query_docstring = strip('''
+    This operation will perform a search and stream a .zip file with the raw files of the
+    found entries.
 
-    Each entry on NOMAD represents a set of raw files. These are the input and output
-    files (as well as additional auxiliary files) in their original form, i.e. as
-    provided by the uploader. More specifically, an entry represents a code-run identified
-    by a certain *mainfile*. This is usually the main output file of the code. All other
+    Each entry on NOMAD has a set of raw files. These are the files in their original form,
+    i.e. as provided by the uploader. More specifically, an entry has a *mainfile*, identified as
+    parseable. For CMS entries, the mainfile is usually the main output file of the code. All other
     files in the same directory are considered the entries *auxiliary* no matter their role
     or if they were actually parsed by NOMAD.
 
@@ -633,32 +631,32 @@ _entries_raw_download_query_docstring = strip('''
 
 
 @router.post(
-    '/raw/download/query',
+    '/raw/query',
     tags=[raw_tag],
     summary='Search entries and download their raw files',
-    description=_entries_raw_download_query_docstring,
+    description=_entries_raw_query_docstring,
     response_class=StreamingResponse,
-    responses=create_responses(_raw_download_response, _bad_owner_response))
-async def post_entries_raw_download_query(
-        data: EntriesRawDownload, user: User = Depends(create_user_dependency())):
+    responses=create_responses(_raw_response, _bad_owner_response))
+async def post_entries_raw_query(
+        data: EntriesRaw, user: User = Depends(create_user_dependency())):
 
-    return _answer_entries_raw_download_request(
+    return _answer_entries_raw_request(
         owner=data.owner, query=data.query, files=data.files, user=user)
 
 
 @router.get(
-    '/raw/download',
+    '/raw',
     tags=[raw_tag],
     summary='Search entries and download their raw files',
-    description=_entries_raw_download_query_docstring,
+    description=_entries_raw_query_docstring,
     response_class=StreamingResponse,
-    responses=create_responses(_raw_download_response, _bad_owner_response))
-async def get_entries_raw_download(
+    responses=create_responses(_raw_response, _bad_owner_response))
+async def get_entries_raw(
         with_query: WithQuery = Depends(query_parameters),
         files: Files = Depends(files_parameters),
         user: User = Depends(create_user_dependency(signature_token_auth_allowed=True))):
 
-    return _answer_entries_raw_download_request(
+    return _answer_entries_raw_request(
         owner=with_query.owner, query=with_query.query, files=files, user=user)
 
 
@@ -913,14 +911,14 @@ async def get_entry_metadata(
 
 
 @router.get(
-    '/{entry_id}/raw',
+    '/{entry_id}/rawdir',
     tags=[raw_tag],
     summary='Get the raw files metadata for an entry by its id',
-    response_model=EntryRawResponse,
+    response_model=EntryRawDirResponse,
     responses=create_responses(_bad_id_response),
     response_model_exclude_unset=True,
     response_model_exclude_none=True)
-async def get_entry_raw(
+async def get_entry_rawdir(
         entry_id: str = Path(..., description='The unique entry id of the entry to retrieve raw data from.'),
         user: User = Depends(create_user_dependency())):
     '''
@@ -940,18 +938,18 @@ async def get_entry_raw(
 
     uploads = _Uploads()
     try:
-        return EntryRawResponse(entry_id=entry_id, data=_create_entry_raw(response.data[0], uploads))
+        return EntryRawDirResponse(entry_id=entry_id, data=_create_entry_rawdir(response.data[0], uploads))
     finally:
         uploads.close()
 
 
 @router.get(
-    '/{entry_id}/raw/download',
+    '/{entry_id}/raw',
     tags=[raw_tag],
     summary='Get the raw data of an entry by its id',
     response_class=StreamingResponse,
-    responses=create_responses(_bad_id_response, _raw_download_response))
-async def get_entry_raw_download(
+    responses=create_responses(_bad_id_response, _raw_response))
+async def get_entry_raw(
         entry_id: str = Path(..., description='The unique entry id of the entry to retrieve raw data from.'),
         files: Files = Depends(files_parameters),
         user: User = Depends(create_user_dependency(signature_token_auth_allowed=True))):
@@ -969,16 +967,16 @@ async def get_entry_raw_download(
             status_code=status.HTTP_404_NOT_FOUND,
             detail='The entry with the given id does not exist or is not visible to you.')
 
-    return _answer_entries_raw_download_request(owner=Owner.visible, query=query, files=files, user=user)
+    return _answer_entries_raw_request(owner=Owner.visible, query=query, files=files, user=user)
 
 
 @router.get(
-    '/{entry_id}/raw/download/{path}',
+    '/{entry_id}/raw/{path}',
     tags=[raw_tag],
     summary='Get the raw data of an entry by its id',
     response_class=StreamingResponse,
-    responses=create_responses(_bad_id_response, _bad_path_response, _raw_download_file_response))
-async def get_entry_raw_download_file(
+    responses=create_responses(_bad_id_response, _bad_path_response, _raw_file_response))
+async def get_entry_raw_file(
         entry_id: str = Path(..., description='The unique entry id of the entry to retrieve raw data from.'),
         path: str = Path(..., description='A relative path to a file based on the directory of the entry\'s mainfile.'),
         offset: Optional[int] = QueryParameter(
@@ -1077,7 +1075,7 @@ def answer_entry_archive_request(query: Dict[str, Any], required: ArchiveRequire
     response_model_exclude_none=True,
     responses=create_responses(_bad_id_response))
 async def get_entry_archive(
-        entry_id: str = Path(..., description='The unique entry id of the entry to retrieve raw data from.'),
+        entry_id: str = Path(..., description='The unique entry id of the entry to retrieve archive data from.'),
         user: User = Depends(create_user_dependency())):
     '''
     Returns the full archive for the given `entry_id`.
@@ -1091,7 +1089,7 @@ async def get_entry_archive(
     summary='Get the archive for an entry by its id as plain archive json',
     responses=create_responses(_bad_id_response, _archive_download_response))
 async def get_entry_archive_download(
-        entry_id: str = Path(..., description='The unique entry id of the entry to retrieve raw data from.'),
+        entry_id: str = Path(..., description='The unique entry id of the entry to retrieve archive data from.'),
         user: User = Depends(create_user_dependency(signature_token_auth_allowed=True))):
     '''
     Returns the full archive for the given `entry_id`.
@@ -1110,7 +1108,7 @@ async def get_entry_archive_download(
     responses=create_responses(_bad_id_response, _bad_archive_required_response))
 async def post_entry_archive_query(
         data: EntryArchiveRequest, user: User = Depends(create_user_dependency()),
-        entry_id: str = Path(..., description='The unique entry id of the entry to retrieve raw data from.')):
+        entry_id: str = Path(..., description='The unique entry id of the entry to retrieve archive data from.')):
     '''
     Returns a partial archive for the given `entry_id` based on the `required` specified
diff --git a/nomad/cli/client/integrationtests.py b/nomad/cli/client/integrationtests.py
index fe65b943280f09cc35273fa0d6b238772869d121..62bbf11f62824bd012fa708dd2aafc7020dccf24 100644
--- a/nomad/cli/client/integrationtests.py
+++ b/nomad/cli/client/integrationtests.py
@@ -156,7 +156,7 @@ def integrationtests(auth: api.Auth, skip_parsers: bool, skip_publish: bool, ski
 
     print('performing download')
     response = api.get(
-        'entries/raw/download',
+        'entries/raw',
         params=dict(upload_id=upload['upload_id'], owner='visible'), auth=auth)
     assert response.status_code == 200, response.text
 
diff --git a/nomad/client/processing.py b/nomad/client/processing.py
index 522ba939c01d2e8911c48eabca74bdf39475136f..85fa928412c514ee7ff1ec843f01217f41fc751f 100644
--- a/nomad/client/processing.py
+++ b/nomad/client/processing.py
@@ -143,7 +143,7 @@ class LocalEntryProcessing:
             # download raw if not already downloaded or if override is set
             print('Downloading', self.entry_id)
             response = self.__handle_response(
-                api.get(f'entries/{self.entry_id}/raw/download', auth=auth))
+                api.get(f'entries/{self.entry_id}/raw', auth=auth))
 
             with open(self.local_path, 'wb') as f:
                 for chunk in response.iter_content(chunk_size=io.DEFAULT_BUFFER_SIZE):
diff --git a/tests/app/v1/routers/test_entries.py b/tests/app/v1/routers/test_entries.py
index db467964edf18c4a1ccc7547958d4e515057f6e5..b6b6ec5ddf30cca998e19fa667ffa5e45f52708b 100644
--- a/tests/app/v1/routers/test_entries.py
+++ b/tests/app/v1/routers/test_entries.py
@@ -52,7 +52,7 @@ def perform_entries_metadata_test(*args, **kwargs):
     return perform_metadata_test(*args, **kwargs)
 
 
-def perform_entries_raw_download_test(
+def perform_entries_raw_test(
         client, headers={}, query={}, owner=None, files={}, total=-1, files_per_entry=5,
         status_code=200, http_method='get'):
 
@@ -64,14 +64,14 @@ def perform_entries_raw_download_test(
         body = {'query': query, 'files': files}
         if owner is not None:
             body['owner'] = owner
-        response = client.post('entries/raw/download/query', headers=headers, json=body)
+        response = client.post('entries/raw/query', headers=headers, json=body)
 
     elif http_method == 'get':
         params = dict(**query)
         params.update(**files)
         if owner is not None:
             params['owner'] = owner
-        response = client.get('entries/raw/download?%s' % urlencode(params, doseq=True), headers=headers)
+        response = client.get('entries/raw?%s' % urlencode(params, doseq=True), headers=headers)
 
     else:
         assert False
@@ -83,7 +83,7 @@ def perform_entries_raw_download_test(
         compressed=files.get('compress', False))
 
 
-def perform_entries_raw_test(
+def perform_entries_rawdir_test(
         client, owner=None, headers={}, status_code=200,
         total=None, http_method='get', files_per_entry=-1, **kwargs):
 
@@ -98,13 +98,13 @@ def perform_entries_raw_test(
         for value in kwargs.values():
             params.update(**value)
         response = client.get(
-            'entries/raw?%s' % urlencode(params, doseq=True), headers=headers)
+            'entries/rawdir?%s' % urlencode(params, doseq=True), headers=headers)
 
     elif http_method == 'post':
         body = dict(**kwargs)
         if owner is not None:
             body['owner'] = owner
-        response = client.post('entries/raw/query', headers=headers, json=body)
+        response = client.post('entries/rawdir/query', headers=headers, json=body)
 
     else:
         assert False
@@ -118,7 +118,7 @@ def perform_entries_raw_test(
     if total is not None:
         assert response_json['pagination']['total'] == total
 
-    assert_entries_raw_response(response_json, files_per_entry=files_per_entry)
+    assert_entries_rawdir_response(response_json, files_per_entry=files_per_entry)
 
     return response_json
 
@@ -228,19 +228,19 @@ def assert_raw_zip_file(
             assert all(key in manifest_keys for key in entry)
 
 
-def assert_entries_raw_response(response_json, files_per_entry: int = -1):
+def assert_entries_rawdir_response(response_json, files_per_entry: int = -1):
     assert 'data' in response_json
     for entry in response_json['data']:
-        assert_entry_raw(entry, files_per_entry)
+        assert_entry_rawdir(entry, files_per_entry)
 
 
-def assert_entry_raw_response(response_json, files_per_entry: int = -1):
+def assert_entry_rawdir_response(response_json, files_per_entry: int = -1):
     for key in ['entry_id', 'data']:
         assert key in response_json
-    assert_entry_raw(response_json['data'], files_per_entry=files_per_entry)
+    assert_entry_rawdir(response_json['data'], files_per_entry=files_per_entry)
 
 
-def assert_entry_raw(data, files_per_entry: int = -1):
+def assert_entry_rawdir(data, files_per_entry: int = -1):
     for key in ['upload_id', 'entry_id', 'files']:
         assert key in data
     files = data['files']
@@ -432,8 +432,8 @@ def test_entry_metadata(client, data, entry_id, required, status_code):
     pytest.param({program_name: 'DOESNOTEXIST'}, {}, 0, 5, 200, id='empty')
 ])
 @pytest.mark.parametrize('http_method', ['post', 'get'])
-def test_entries_raw(client, data, query, files, total, files_per_entry, status_code, http_method):
-    perform_entries_raw_test(
+def test_entries_rawdir(client, data, query, files, total, files_per_entry, status_code, http_method):
+    perform_entries_rawdir_test(
         client, status_code=status_code, query=query, files=files, total=total,
         files_per_entry=files_per_entry, http_method=http_method)
 
@@ -452,15 +452,15 @@ def test_entries_raw(client, data, query, files, total, files_per_entry, status_
     pytest.param({}, {'include_files': ['1.aux', '2.aux']}, 23, 2, 200, id='files')
 ])
 @pytest.mark.parametrize('http_method', ['post', 'get'])
-def test_entries_download_raw(client, data, query, files, total, files_per_entry, status_code, http_method):
-    perform_entries_raw_download_test(
+def test_entries_raw(client, data, query, files, total, files_per_entry, status_code, http_method):
+    perform_entries_raw_test(
         client, status_code=status_code, query=query, files=files, total=total,
         files_per_entry=files_per_entry, http_method=http_method)
 
 
 @pytest.mark.parametrize('http_method', ['post', 'get'])
 @pytest.mark.parametrize('test_method', [
-    pytest.param(perform_entries_raw_download_test, id='raw-download'),
+    pytest.param(perform_entries_raw_test, id='raw'),
     pytest.param(perform_entries_archive_download_test, id='archive-download')])
 def test_entries_download_max(monkeypatch, client, data, test_method, http_method):
     monkeypatch.setattr('nomad.config.max_entry_download', 20)
@@ -472,11 +472,11 @@ def test_entries_download_max(monkeypatch, client, data, test_method, http_metho
     pytest.param('id_01', 5, 200, id='id'),
     pytest.param('id_embargo', -1, 404, id='404'),
     pytest.param('doesnotexist', -1, 404, id='404')])
-def test_entry_raw(client, data, entry_id, files_per_entry, status_code):
-    response = client.get('entries/%s/raw' % entry_id)
+def test_entry_rawdir(client, data, entry_id, files_per_entry, status_code):
+    response = client.get('entries/%s/rawdir' % entry_id)
     assert_response(response, status_code)
     if status_code == 200:
-        assert_entry_raw_response(response.json(), files_per_entry=files_per_entry)
+        assert_entry_rawdir_response(response.json(), files_per_entry=files_per_entry)
 
 
 @pytest.mark.parametrize('entry_id, files, files_per_entry, status_code', [
@@ -489,8 +489,8 @@ def test_entry_raw(client, data, entry_id, files_per_entry, status_code):
     pytest.param('id_01', {'include_files': ['1.aux']}, 1, 200, id='file'),
     pytest.param('id_01', {'include_files': ['1.aux', '2.aux']}, 2, 200, id='files')
 ])
-def test_entry_raw_download(client, data, entry_id, files, files_per_entry, status_code):
-    response = client.get('entries/%s/raw/download?%s' % (entry_id, urlencode(files, doseq=True)))
+def test_entry_raw(client, data, entry_id, files, files_per_entry, status_code):
+    response = client.get('entries/%s/raw?%s' % (entry_id, urlencode(files, doseq=True)))
     assert_response(response, status_code)
     if status_code == 200:
         assert_raw_zip_file(
@@ -560,7 +560,7 @@ def example_data_with_compressed_files(elastic_module, raw_files_module, mongo_m
     pytest.param('id_embargo_w_coauthor_1', 'mainfile.json', {'user': 'other-test-user'}, 200, id='embargo-coauthor'),
     pytest.param('id_embargo_w_reviewer_1', 'mainfile.json', {'user': 'other-test-user'}, 200, id='embargo-reviewer')
 ])
-def test_entry_raw_download_file(
+def test_entry_raw_file(
         client, data, example_data_with_compressed_files, example_mainfile_contents,
         test_user_auth, other_test_user_auth, entry_id, path, params, status_code):
 
@@ -575,7 +575,7 @@ def test_entry_raw_download_file(
         headers = {}
 
     response = client.get(
-        f'entries/{entry_id}/raw/download/{path}?{urlencode(params, doseq=True)}',
+        f'entries/{entry_id}/raw/{path}?{urlencode(params, doseq=True)}',
         headers=headers)
     assert_response(response, status_code)
@@ -670,8 +670,8 @@ n_elements = 'results.material.n_elements'
 ])
 @pytest.mark.parametrize('test_method', [
     pytest.param(perform_entries_metadata_test, id='metadata'),
-    pytest.param(perform_entries_raw_download_test, id='raw-download'),
     pytest.param(perform_entries_raw_test, id='raw'),
+    pytest.param(perform_entries_rawdir_test, id='rawdir'),
     pytest.param(perform_entries_archive_test, id='archive'),
     pytest.param(perform_entries_archive_download_test, id='archive-download')])
 def test_entries_post_query(client, data, query, status_code, total, test_method):
@@ -699,8 +699,8 @@ def test_entries_post_query(client, data, query, status_code, total, test_method
     pytest.param({'q': 'domain__dft'}, 200, 23, id='enum')])
 @pytest.mark.parametrize('test_method', [
     pytest.param(perform_entries_metadata_test, id='metadata'),
-    pytest.param(perform_entries_raw_download_test, id='raw-download'),
     pytest.param(perform_entries_raw_test, id='raw'),
+    pytest.param(perform_entries_rawdir_test, id='rawdir'),
     pytest.param(perform_entries_archive_test, id='archive'),
     pytest.param(perform_entries_archive_download_test, id='archive-download')])
 def test_entries_get_query(client, data, query, status_code, total, test_method):
@@ -732,8 +732,8 @@ def test_entries_get_query(client, data, query, status_code, total, test_method)
 @pytest.mark.parametrize('http_method', ['post', 'get'])
 @pytest.mark.parametrize('test_method', [
     pytest.param(perform_entries_metadata_test, id='metadata'),
-    pytest.param(perform_entries_raw_download_test, id='raw-download'),
     pytest.param(perform_entries_raw_test, id='raw'),
+    pytest.param(perform_entries_rawdir_test, id='rawdir'),
     pytest.param(perform_entries_archive_test, id='archive'),
     pytest.param(perform_entries_archive_download_test, id='archive-download')])
 def test_entries_owner(
@@ -753,7 +753,7 @@ def test_entries_owner(
 @pytest.mark.parametrize('http_method', ['post', 'get'])
 @pytest.mark.parametrize('test_method', [
     pytest.param(perform_entries_metadata_test, id='metadata'),
-    pytest.param(perform_entries_raw_test, id='raw'),
+    pytest.param(perform_entries_rawdir_test, id='rawdir'),
     pytest.param(perform_entries_archive_test, id='archive')])
 def test_entries_pagination(client, data, pagination, response_pagination, status_code, http_method, test_method):
     response_json = test_method(
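
A minimal usage sketch of the two renamed per-entry routes from this patch: `GET entries/{entry_id}/rawdir` now returns the raw file metadata (the old `raw` behavior), while `GET entries/{entry_id}/raw` streams the zip that used to live under `raw/download`. The base URL and entry id below are placeholder assumptions, not values taken from the patch.

```python
import requests

base_url = 'https://nomad-lab.eu/prod/rae/api/v1'  # assumption: public NOMAD deployment
entry_id = '<entry-id>'                            # placeholder

# List the raw files of one entry; each item has 'path' and 'size', mainfile first.
rawdir = requests.get(f'{base_url}/entries/{entry_id}/rawdir').json()
for file_info in rawdir['data']['files']:
    print(file_info['path'], file_info['size'])

# Stream the same entry's raw files as one zip archive.
with requests.get(f'{base_url}/entries/{entry_id}/raw', stream=True) as response:
    response.raise_for_status()
    with open('raw_files.zip', 'wb') as zip_file:
        for chunk in response.iter_content(chunk_size=1024 * 1024):
            zip_file.write(chunk)
```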
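The bulk download route documented in `docs/api.md` moves the same way, from `entries/raw/download` to `entries/raw` (and its POST variant to `entries/raw/query`). A hedged sketch of the POST form; the query values and the `include_files` filter are illustrative assumptions, not taken from the patch:

```python
import requests

base_url = 'https://nomad-lab.eu/prod/rae/api/v1'  # assumption: public NOMAD deployment
body = {
    'query': {'results.material.elements': ['Ti', 'O']},  # illustrative query
    'files': {'include_files': ['INCAR', 'OUTCAR']}        # illustrative file filter
}

# The endpoint streams a zip; the content length is not known in advance.
with requests.post(f'{base_url}/entries/raw/query', json=body, stream=True) as response:
    response.raise_for_status()
    with open('query_raw_files.zip', 'wb') as zip_file:
        for chunk in response.iter_content(chunk_size=1024 * 1024):
            zip_file.write(chunk)
```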