diff --git a/nomad/app/v1/routers/entries.py b/nomad/app/v1/routers/entries.py
index b565c4bf63c7abc607afc59b2d9229911f8c3cb4..1d7de71a6fdac8c0b17e448788af2032bc26fef4 100644
--- a/nomad/app/v1/routers/entries.py
+++ b/nomad/app/v1/routers/entries.py
@@ -1030,10 +1030,9 @@ async def get_entry_raw_download_file(
     return StreamingResponse(raw_file_content, media_type=mime_type)
 
 
-def _answer_entry_archive_request(entry_id: str, required: ArchiveRequired, user: User):
+def answer_entry_archive_request(query: Dict[str, Any], required: ArchiveRequired, user: User):
     required_reader = _validate_required(required)
 
-    query = dict(calc_id=entry_id)
     response = perform_search(
         owner=Owner.visible, query=query,
         required=MetadataRequired(include=['entry_id', 'upload_id', 'parser_name']),
@@ -1042,9 +1041,10 @@ def _answer_entry_archive_request(entry_id: str, required: ArchiveRequired, user
     if response.pagination.total == 0:
         raise HTTPException(
             status_code=status.HTTP_404_NOT_FOUND,
-            detail='The entry with the given id does not exist or is not visible to you.')
+            detail='The entry does not exist or is not visible to you.')
 
     entry_metadata = response.data[0]
+    entry_id = entry_metadata['entry_id']
 
     uploads = _Uploads()
     try:
@@ -1053,7 +1053,7 @@ def _answer_entry_archive_request(entry_id: str, required: ArchiveRequired, user
         except KeyError:
             raise HTTPException(
                 status_code=status.HTTP_404_NOT_FOUND,
-                detail='The entry with the given id does exist, but it has no archive.')
+                detail='The entry does exist, but it has no archive.')
 
         return {
             'entry_id': entry_id,
@@ -1082,7 +1082,7 @@ async def get_entry_archive(
     '''
     Returns the full archive for the given `entry_id`.
     '''
-    return _answer_entry_archive_request(entry_id=entry_id, required='*', user=user)
+    return answer_entry_archive_request(dict(entry_id=entry_id), required='*', user=user)
 
 
 @router.get(
@@ -1096,7 +1096,7 @@ async def get_entry_archive_download(
     '''
     Returns the full archive for the given `entry_id`.
     '''
-    response = _answer_entry_archive_request(entry_id=entry_id, required='*', user=user)
+    response = answer_entry_archive_request(dict(entry_id=entry_id), required='*', user=user)
     return response['data']['archive']
 
 
@@ -1116,7 +1116,7 @@ async def post_entry_archive_query(
     Returns a partial archive for the given `entry_id` based on the `required` specified
     in the body.
     '''
-    return _answer_entry_archive_request(entry_id=entry_id, required=data.required, user=user)
+    return answer_entry_archive_request(dict(entry_id=entry_id), required=data.required, user=user)
 
 
 def edit(query: Query, user: User, mongo_update: Dict[str, Any] = None, re_index=True) -> List[str]:
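
The refactor above is the enabler for the new upload routes: the helper now accepts an arbitrary search query dict instead of a bare entry id and resolves the entry via `perform_search`. A minimal sketch of the two ways it is now called (the id and mainfile values are placeholders; the surrounding request handling is omitted):

```python
# Sketch only: 'some_entry_id', 'some_upload_id' and the mainfile path are placeholders.

# Look an entry up by its id, as the /entries/{entry_id}/archive routes do:
result = answer_entry_archive_request(dict(entry_id='some_entry_id'), required='*', user=user)

# Or by upload id and mainfile, as the new upload routes below do:
result = answer_entry_archive_request(
    dict(upload_id='some_upload_id', mainfile='path/to/mainfile.json'),
    required='*', user=user)

# Either way the helper returns a dict containing at least 'entry_id' and 'data',
# with result['data']['archive'] holding the archive content (see the
# /archive/download handler above, which returns exactly that value).
```
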
diff --git a/nomad/app/v1/routers/uploads.py b/nomad/app/v1/routers/uploads.py
index 057e3cf30424d4810edbb4b84dbc651dfe0e0bcf..85994309c44129bcb33360027a7060925a1a144c 100644
--- a/nomad/app/v1/routers/uploads.py
+++ b/nomad/app/v1/routers/uploads.py
@@ -38,6 +38,7 @@ from .auth import create_user_dependency, generate_upload_token
 from ..models import (
     MetadataPagination, User, Direction, Pagination, PaginationResponse, HTTPExceptionModel,
     Files, files_parameters, WithQuery, MetadataEditRequest)
+from .entries import EntryArchiveResponse, answer_entry_archive_request
 from ..utils import (
     parameter_dependency_from_model, create_responses, DownloadItem,
     create_download_stream_zipped, create_download_stream_raw_file, create_stream_from_string)
@@ -46,6 +47,7 @@ router = APIRouter()
 default_tag = 'uploads'
 metadata_tag = 'uploads/metadata'
 raw_tag = 'uploads/raw'
+archive_tag = 'uploads/archive'
 action_tag = 'uploads/action'
 bundle_tag = 'uploads/bundle'
 
@@ -822,6 +824,56 @@ async def delete_upload_raw_path(
     return UploadProcDataResponse(upload_id=upload_id, data=_upload_to_pydantic(upload))
 
 
+@router.get(
+    '/{upload_id}/entries/mainfile/{mainfile:path}/archive', tags=[archive_tag],
+    summary='Get the full archive for the given upload and mainfile path.',
+    response_model=EntryArchiveResponse,
+    response_model_exclude_unset=True,
+    response_model_exclude_none=True,
+    responses=create_responses(_upload_or_path_not_found, _not_authorized_to_upload))
+async def get_upload_entry_archive_mainfile(
+        upload_id: str = Path(
+            ...,
+            description='The unique id of the upload.'),
+        mainfile: str = Path(
+            ...,
+            description='The mainfile path within the upload\'s raw files.'),
+        user: User = Depends(create_user_dependency(required=False))):
+    '''
+    For the upload specified by `upload_id`, gets the full archive of a single entry that
+    is identified by the given `mainfile`.
+    '''
+    _get_upload_with_read_access(upload_id, user, include_others=True)
+    return answer_entry_archive_request(
+        dict(upload_id=upload_id, mainfile=mainfile),
+        required='*', user=user)
+
+
+@router.get(
+    '/{upload_id}/entries/{entry_id}/archive', tags=[archive_tag],
+    summary='Get the full archive for the given upload and entry.',
+    response_model=EntryArchiveResponse,
+    response_model_exclude_unset=True,
+    response_model_exclude_none=True,
+    responses=create_responses(_upload_or_path_not_found, _not_authorized_to_upload))
+async def get_upload_entry_archive(
+        upload_id: str = Path(
+            ...,
+            description='The unique id of the upload.'),
+        entry_id: str = Path(
+            ...,
+            description='The unique entry id.'),
+        user: User = Depends(create_user_dependency(required=False))):
+    '''
+    For the upload specified by `upload_id`, gets the full archive of a single entry that
+    is identified by the given `entry_id`.
+    '''
+    _get_upload_with_read_access(upload_id, user, include_others=True)
+    return answer_entry_archive_request(
+        dict(upload_id=upload_id, entry_id=entry_id),
+        required='*', user=user)
+
+
 @router.post(
     '', tags=[default_tag],
     summary='Submit a new upload',
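
The two routes added above expose the same archive content through upload-scoped URLs. A hedged client-side sketch of how they could be called, assuming a local NOMAD API at a placeholder base URL and reusing the ids and mainfile path from the tests below:

```python
import requests

base = 'http://localhost/api/v1'  # placeholder; adjust to the actual deployment

# Full archive for an entry addressed by upload id + entry id.
r = requests.get(f'{base}/uploads/id_published/entries/id_01/archive')
r.raise_for_status()
archive = r.json()['data']['archive']

# The same entry addressed by upload id + mainfile path; the {mainfile:path}
# converter allows slashes inside the raw-file path.
mainfile = 'test_content/subdir/test_entry_01/mainfile.json'
r = requests.get(f'{base}/uploads/id_published/entries/mainfile/{mainfile}/archive')
r.raise_for_status()
assert r.json()['data']['archive'] == archive

# Unpublished uploads return 401 for anonymous requests; authenticate with the
# usual Authorization header, e.g. headers={'Authorization': f'Bearer {token}'}.
```
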
diff --git a/tests/app/v1/routers/test_uploads.py b/tests/app/v1/routers/test_uploads.py
index 8db69d0d7ab02b952dcbfc114035fad3aec2a227..6cb7064675ccc80af3416edb344dff6e2bd43b8d 100644
--- a/tests/app/v1/routers/test_uploads.py
+++ b/tests/app/v1/routers/test_uploads.py
@@ -38,6 +38,8 @@ from nomad.files import UploadFiles, StagingUploadFiles, PublicUploadFiles
 from nomad.datamodel import EntryMetadata
 from nomad.search import search
 
+from .test_entries import assert_archive_response
+
 '''
 These are the tests for all API operations below ``uploads``. The tests are organized
 using the following type of methods: fixtures, ``perform_*_test``, ``assert_*``, and
@@ -839,6 +841,42 @@ def test_get_upload_raw_path(
                             assert found, f'Missing expected path in zip file: {expected_path}'
 
 
+@pytest.mark.parametrize('upload_id, mainfile, user, status_code', [
+    pytest.param('id_published', 'test_content/subdir/test_entry_01/mainfile.json', None, 200, id='published'),
+    pytest.param('id_published', 'test_content/doesnotexist.json', None, 404, id='bad-mainfile'),
+    pytest.param('id_doesnotexist', 'test_content/subdir/test_entry_01/mainfile.json', None, 404, id='bad-upload-id'),
+    pytest.param('id_unpublished', 'test_content/id_unpublished_1/mainfile.json', None, 401, id='unpublished'),
+    pytest.param('id_unpublished', 'test_content/id_unpublished_1/mainfile.json', 'test_user', 200, id='auth')
+])
+def test_get_upload_entry_archive_mainfile(
+    client, example_data, test_auth_dict,
+    upload_id: str, mainfile: str, user: str, status_code: int
+):
+    user_auth, _ = test_auth_dict[user]
+    response = client.get(f'uploads/{upload_id}/entries/mainfile/{mainfile}/archive', headers=user_auth)
+    assert_response(response, status_code)
+    if status_code == 200:
+        assert_archive_response(response.json())
+
+
+@pytest.mark.parametrize('upload_id, entry_id, user, status_code', [
+    pytest.param('id_published', 'id_01', None, 200, id='published'),
+    pytest.param('id_published', 'doesnotexist', None, 404, id='bad-entry-id'),
+    pytest.param('id_doesnotexist', 'id_01', None, 404, id='bad-upload-id'),
+    pytest.param('id_unpublished', 'id_unpublished_1', None, 401, id='unpublished'),
+    pytest.param('id_unpublished', 'id_unpublished_1', 'test_user', 200, id='auth')
+])
+def test_get_upload_entry_archive(
+    client, example_data, test_auth_dict,
+    upload_id: str, entry_id: str, user: str, status_code: int
+):
+    user_auth, _ = test_auth_dict[user]
+    response = client.get(f'uploads/{upload_id}/entries/{entry_id}/archive', headers=user_auth)
+    assert_response(response, status_code)
+    if status_code == 200:
+        assert_archive_response(response.json())
+
+
 @pytest.mark.parametrize('mode, user, upload_id, source_path, target_path, query_args, accept_json, use_upload_token, expected_status_code, expected_mainfiles', [
     pytest.param(
         'stream', None, 'examples_template', example_file_aux, '', {'file_name': 'blah.aux'},