diff --git a/nomad/app/v1/models/graph/graph_models.py b/nomad/app/v1/models/graph/graph_models.py
index 151a02a94a8edc94e64fffed0c700f0e8425ead4..b3c833601ba9b1065c1cdf0df93a94110a658222 100644
--- a/nomad/app/v1/models/graph/graph_models.py
+++ b/nomad/app/v1/models/graph/graph_models.py
@@ -79,11 +79,6 @@ class GraphFile(BaseModel):
     parent: GraphDirectory
 
 
-class MDef(BaseModel):
-    m_def: str
-    m_def_id: str
-
-
 class MSection(BaseModel):
     m_errors: List[Error]
     m_request: RecursionOptions
@@ -91,6 +86,11 @@ class MSection(BaseModel):
     m_children: Any
 
 
+class MDef(MSection):
+    m_def: str  # type: ignore
+    m_def_id: str
+
+
 class GraphEntry(mapped(EntryProcData, mainfile='mainfile_path', entry_metadata=None)):  # type: ignore
     m_errors: List[Error]
     mainfile: GraphFile
diff --git a/nomad/app/v1/routers/graph.py b/nomad/app/v1/routers/graph.py
index 613ed8ce6a6dcbc5a5b727dc493732f6ed437ace..13a6817fc3df66aeec296bf6a3000afaa5c10c4a 100644
--- a/nomad/app/v1/routers/graph.py
+++ b/nomad/app/v1/routers/graph.py
@@ -52,6 +52,9 @@ def relocate_children(request):
 def reorder_children(query):
     if not isinstance(query, dict):
         return query
+    # do not touch folders
+    if 'm_is' in query:
+        return {k: reorder_children(v) for k, v in query.items()}
     return {
         k: reorder_children(v)
         for k, v in sorted(query.items(), key=lambda item: item[0])
diff --git a/nomad/cli/client/__init__.py b/nomad/cli/client/__init__.py
index 20ae743e2fd0f8e67ae4db10b9e7d9c6be467c31..9f51a0e79a931c0f5df60576985ea9d339740849 100644
--- a/nomad/cli/client/__init__.py
+++ b/nomad/cli/client/__init__.py
@@ -111,7 +111,8 @@ def datatests(ctx):
 
 @client.command(
     help='Upload files to nomad. The given path can be a single file or a directory. '
-    'All .zip files in a directory will be uploaded.'
+    'For file(s) that are not .zip or .tar files, a temporary .zip file '
+    'will be created and uploaded.'
 )
 @click.argument('PATH', nargs=-1, required=True, type=click.Path(exists=True))
 @click.option(
@@ -131,42 +132,79 @@ def datatests(ctx):
     default=False,
     help='Automatically move upload out of the staging area after successful processing',
 )
+@click.option(
+    '--ignore-path-prefix',
+    is_flag=True,
+    default=False,
+    help='Ignores common path prefixes when creating an upload.',
+)
 @click.pass_context
-def upload(ctx, path, upload_name: str, local_path: bool, publish: bool):
+def upload(
+    ctx,
+    path,
+    upload_name: str,
+    local_path: bool,
+    publish: bool,
+    ignore_path_prefix: bool,
+):
     import os
     import sys
+    import zipfile
+    import tempfile
 
     from nomad.client import upload_file as client_upload_file
 
     auth = _create_auth(ctx)
     paths = path
 
-    def upload_file(path, upload_name):
-        result = client_upload_file(
-            path, auth, upload_name=upload_name, local_path=local_path, publish=publish
-        )
-        if result is None:
-            sys.exit(1)
+    prefix = os.path.commonprefix(paths)
+    if not os.path.isdir(prefix):
+        prefix = os.path.dirname(prefix)
+    file_paths = []
+    zip_paths = []
+
+    def add_file(file):
+        _, ext = os.path.splitext(file)
+        if ext in ['.zip', '.tar', '.tgz', '.gz']:
+            zip_paths.append(file)
+        else:
+            file_paths.append(file)
 
-    click.echo('uploading files from %s paths' % len(paths))
     for path in paths:
-        click.echo('uploading %s' % path)
         if os.path.isfile(path):
-            upload_name = (
-                upload_name if upload_name is not None else os.path.basename(path)
-            )
-            upload_file(path, upload_name)
+            add_file(path)
 
         elif os.path.isdir(path):
             for dirpath, _, filenames in os.walk(path):
                 for filename in filenames:
-                    if filename.endswith('.zip'):
-                        file_path = os.path.abspath(os.path.join(dirpath, filename))
-                        upload_name = os.path.basename(file_path)
-                        upload_file(file_path, upload_name)
+                    add_file(os.path.join(dirpath, filename))
+
+    with tempfile.NamedTemporaryFile(suffix='.zip', delete=False) as temp_zip:
+        click.echo(f'create temporary zipfile from {len(file_paths)} files')
+        with zipfile.ZipFile(temp_zip.name, 'w') as zip_file:
+            for file_path in file_paths:
+                if file_path.startswith(prefix) and ignore_path_prefix:
+                    arcname = file_path[len(prefix) :]
+                else:
+                    arcname = file_path
+                zip_file.write(file_path, arcname=arcname)
+        zip_paths.append(temp_zip.name)
+
+        for zip_path in zip_paths:
+            click.echo(f'uploading {zip_path}')
+            effective_name = (
+                upload_name if upload_name is not None else os.path.basename(zip_path)
+            )

-        else:
-            click.echo('Unknown path type %s.' % path)
+            result = client_upload_file(
+                zip_path,
+                auth,
+                upload_name=effective_name,
+                local_path=local_path,
+                publish=publish,
+            )
+            if result is None:
+                sys.exit(1)
 
 
 @client.command(help='Run processing locally.')
diff --git a/nomad/cli/dev.py b/nomad/cli/dev.py
index a476ddd9aeca19fa75df8c9e3515f70953f9fa7c..e3048f02b14ebf9f6d2c8237837633395494c601 100644
--- a/nomad/cli/dev.py
+++ b/nomad/cli/dev.py
@@ -263,7 +263,13 @@ def gui_config():
 def _generate_example_upload_metadata():
     import yaml
 
-    with open('examples/data/uploads/example_uploads.yml') as infile:
+    with open(
+        os.path.join(
+            os.path.dirname(__file__),
+            '../../',
+            'examples/data/uploads/example_uploads.yml',
+        )
+    ) as infile:
         return yaml.load(infile, Loader=yaml.SafeLoader)
 
 
diff --git a/nomad/graph/graph_reader.py b/nomad/graph/graph_reader.py
index 4593f60f8ff73b8a2fa6fee199c9b84da304d3c8..ad5578202d3b0aae5d7510330bcb3577d1dba813 100644
--- a/nomad/graph/graph_reader.py
+++ b/nomad/graph/graph_reader.py
@@ -20,6 +20,7 @@ from __future__ import annotations
 import copy
 import dataclasses
 import functools
+import itertools
 import os
 import re
 from typing import Any, Callable, Type
@@ -44,6 +45,7 @@ from nomad.app.v1.routers.uploads import (
     UploadProcDataQuery,
     UploadProcDataPagination,
     EntryProcDataPagination,
+    RawDirPagination,
 )
 from nomad.archive import ArchiveList, ArchiveDict, to_json
 from nomad.graph.model import (
@@ -352,6 +354,16 @@ def _convert_ref_to_path_string(ref: str, upload_id: str = None) -> str:
     return '/'.join(_convert_ref_to_path(ref, upload_id))
 
 
+def _to_response_config(config: RequestConfig, exclude: list = None, **kwargs):
+    response_config = config.dict(exclude_unset=True, exclude_none=True)
+    response_config.pop('property_name', None)
+    if exclude:
+        for item in exclude:
+            response_config.pop(item, None)
+    response_config.update(kwargs)
+    return response_config
+
+
 def _populate_result(container_root: dict, path: list, value, *, path_like=False):
     """
     For the given path and the root of the target container, populate the value.
@@ -1361,6 +1373,15 @@ class MongoReader(GeneralReader):
             def __offload_walk(query_set, transformer):
                 response_path: list = node.current_path + [key, Token.RESPONSE]
 
+                if isinstance(value, dict) and GeneralReader.__CONFIG__ in value:
+                    _populate_result(
+                        node.result_root,
+                        response_path,
+                        _to_response_config(
+                            child_config, exclude=['query', 'pagination']
+                        ),
+                    )
+
                 query, filtered = query_set
                 if query is not None:
                     _populate_result(node.result_root, response_path + ['query'], query)
@@ -1882,6 +1903,23 @@ class FileSystemReader(GeneralReader):
 
         return response
 
+    @staticmethod
+    def _to_abs_path(rel_path: list) -> list:
+        abs_path: list = []
+        # condense the path
+        for p in rel_path:
+            for pp in p.split('/'):  # to consider '../../../'
+                if pp in ('.', ''):
+                    continue
+                if pp == '..':
+                    if abs_path:
+                        abs_path.pop()
+                elif pp == '...':
+                    abs_path = []
+                else:
+                    abs_path.append(pp)
+        return abs_path
+
     def _walk(
         self,
         node: GraphNode,
@@ -1900,7 +1938,7 @@ class FileSystemReader(GeneralReader):
             current_config = parent_config
 
         full_path: list = self._root_path + node.current_path
-        full_path_str: str = '/'.join(full_path)
+        full_path_str: str = '/'.join(self._to_abs_path(full_path))
         is_current_path_file: bool = node.archive.raw_path_is_file(full_path_str)
 
         if not is_current_path_file:
@@ -1921,7 +1959,9 @@ class FileSystemReader(GeneralReader):
 
             child_path: list = node.current_path + [key]
 
-            if not node.archive.raw_path_exists('/'.join(self._root_path + child_path)):
+            if not node.archive.raw_path_exists(
+                '/'.join(self._to_abs_path(self._root_path + child_path))
+            ):
                 continue
 
             self._walk(
@@ -1943,23 +1983,11 @@ class FileSystemReader(GeneralReader):
     ):
         # at the point, it is guaranteed that the current path exists, but it could be a relative path
         full_path: list = self._root_path + node.current_path
-        abs_path: list = []
-        # condense the path
-        for p in full_path:
-            for pp in p.split('/'):  # to consider '../../../'
-                if pp in ('.', ''):
-                    continue
-                if pp == '..':
-                    abs_path.pop()
-                else:
-                    abs_path.append(pp)
+        abs_path: list = self._to_abs_path(full_path)
 
         os_path: str = '/'.join(abs_path)
         if not node.archive.raw_path_is_file(os_path):
             _populate_result(node.result_root, full_path + ['m_is'], 'Directory')
-            # _populate_result(
-            #     node.result_root, full_path + [Token.RESPONSE, 'pagination'],
-            #     config.pagination.dict() if config.pagination is not None else dict(page=1, page_size=10))
 
         ref_path = ['/'.join(self._root_path)]
         if ref_path[0]:
@@ -1967,16 +1995,51 @@ class FileSystemReader(GeneralReader):
         else:
             ref_path = node.current_path
 
+        if config.pagination is not None:
+            assert isinstance(config.pagination, RawDirPagination)
+            start: int = config.pagination.get_simple_index()
+            pagination: dict = config.pagination.dict(exclude_none=True)
+        else:
+            start = 0
+            pagination = dict(page=1, page_size=10, order='asc')
+        end: int = start + pagination['page_size']
+
+        folders: list = []
+        files: list = []
         file: RawPathInfo
         for file in node.archive.raw_directory_list(
             os_path, recursive=True, depth=config.depth if config.depth else -1
         ):
+            if file.is_file:
+                files.append(file)
+            else:
+                folders.append(file)
+
+        pagination['total'] = len(folders) + len(files)
+
+        _populate_result(
+            node.result_root,
+            full_path + [Token.RESPONSE],
+            _to_response_config(config, pagination=pagination),
+            path_like=True,
+        )
+
+        for index, file in enumerate(itertools.chain(folders, files)):
+            if index >= end:
+                break
+
+            if index < start:
+                continue
+
             if not config.if_include(file.path):
                 continue
 
             results = file._asdict()
             results.pop('access', None)
-            results['m_is'] = 'File' if results.pop('is_file') else 'Directory'
+            if results.pop('is_file'):
+                results['m_is'] = 'File'
+            else:
+                results = {'m_is': 'Directory'}
             if omit_keys is None or all(
                 not file.path.endswith(os.path.sep + k) for k in omit_keys
             ):
@@ -1999,7 +2062,7 @@ class FileSystemReader(GeneralReader):
     def validate_config(cls, key: str, config: RequestConfig):
         try:
             if config.pagination is not None:
-                config.pagination = Pagination.parse_obj(config.pagination)
+                config.pagination = RawDirPagination.parse_obj(config.pagination)
         except Exception as e:
             raise ConfigError(str(e))
 
@@ -2058,7 +2121,7 @@ class ArchiveReader(GeneralReader):
         self.package_pool: dict = {}
 
     @staticmethod
-    def __if_strip(node: GraphNode, config: RequestConfig):
+    def __if_strip(node: GraphNode, config: RequestConfig, *, depth_check: bool = True):
         if (
             config.max_list_size is not None
             and isinstance(node.archive, list)
@@ -2073,7 +2136,11 @@ class ArchiveReader(GeneralReader):
         ):
             return True
 
-        if config.depth is not None and node.current_depth > config.depth:
+        if (
+            depth_check
+            and config.depth is not None
+            and node.current_depth >= config.depth
+        ):
             return True
 
         return False
@@ -2162,6 +2229,11 @@ class ArchiveReader(GeneralReader):
                 continue
 
             if key == Token.DEF:
+                if isinstance(node.definition, Quantity):
+                    self._log(
+                        'Only support "m_def" token on sections, try defining "m_def" request on the parent.'
+                    )
+                    continue
                 with DefinitionReader(
                     value,
                     user=self.user,
@@ -2309,7 +2381,7 @@ class ArchiveReader(GeneralReader):
                     self._log(f'Definition {key} is not found.')
                     continue
 
-                if isinstance(child_definition, SubSection):
+                if is_subsection := isinstance(child_definition, SubSection):
                     child_definition = child_definition.sub_section
 
                 child_node = node.replace(
@@ -2319,7 +2391,7 @@ class ArchiveReader(GeneralReader):
                     current_depth=node.current_depth + 1,
                 )
 
-                if self.__if_strip(child_node, config):
+                if self.__if_strip(child_node, config, depth_check=is_subsection):
                     _populate_result(
                         node.result_root,
                         child_node.current_path,
@@ -2622,8 +2694,6 @@ class DefinitionReader(GeneralReader):
             is_plain_container: bool = (
                 False if is_list else isinstance(child_def, (list, set, dict))
             )
-            if is_plain_container and isinstance(child_def, (list, set)):
-                child_def = {v.name: v for v in child_def}
 
             child_path: list = node.current_path + [name]
 
@@ -2641,12 +2711,36 @@ class DefinitionReader(GeneralReader):
                 # this is a derived quantity like 'all_properties', 'all_quantities', etc.
                 # just write reference strings to the corresponding paths
                 # whether they shall be resolved or not is determined by the config and will be handled later
-                for k, v in child_def.items():
-                    _populate_result(node.result_root, child_path + [k], __convert(v))
+                if isinstance(child_def, dict):
+                    _populate_result(node.result_root, child_path, {})
+                    for k, v in child_def.items():
+                        _populate_result(
+                            node.result_root, child_path + [k], __convert(v)
+                        )
+                elif isinstance(child_def, (set, list)):
+                    _populate_result(node.result_root, child_path, [])
+                    for i, v in enumerate(child_def):
+                        _populate_result(
+                            node.result_root, child_path + [str(i)], __convert(v)
+                        )
+                else:
+                    # should never reach here
+                    raise TypeError(f'Unexpected derived container type: {type(child_def)}')
             elif child_def is node.archive:
                 assert isinstance(child_def, Definition)
                 _populate_result(node.result_root, child_path, __convert(child_def))
 
+            def __handle_derived(__func):
+                if isinstance(child_def, dict):
+                    for _k, _v in child_def.items():
+                        __func(child_path + [_k], _v)
+                elif isinstance(child_def, (set, list)):
+                    for _i, _v in enumerate(child_def):
+                        __func(child_path + [str(_i)], _v)
+                else:
+                    # should never reach here
+                    raise TypeError(f'Unexpected derived container type: {type(child_def)}')
+
             if isinstance(value, RequestConfig):
                 # this is a leaf, resolve it according to the config
                 def __resolve(__path, __archive):
@@ -2665,8 +2759,7 @@ class DefinitionReader(GeneralReader):
                         __resolve(child_path + [str(i)], child_def[i])
                 elif is_plain_container:
                     if value.directive is DirectiveType.resolved:
-                        for k, v in child_def.items():
-                            __resolve(child_path + [k], v)
+                        __handle_derived(__resolve)
                 else:
                     __resolve(child_path, child_def)
             elif isinstance(value, dict):
@@ -2685,8 +2778,7 @@ class DefinitionReader(GeneralReader):
                     for i in _normalise_index(index, len(child_def)):
                         __walk(child_path + [str(i)], child_def[i])
                 elif is_plain_container:
-                    for k, v in child_def.items():
-                        __walk(child_path + [k], v)
+                    __handle_derived(__walk)
                 else:
                     __walk(child_path, child_def)
             elif isinstance(value, list):
diff --git a/nomad/graph/model.py b/nomad/graph/model.py
index 6a1f9fc4192db9321a59b8e83aa9274617a4d614..d9ff44bf51b4a5605a0f38c47e8fd894b3455bc2 100644
--- a/nomad/graph/model.py
+++ b/nomad/graph/model.py
@@ -25,12 +25,13 @@ from typing import FrozenSet, Optional, Union
 
 from pydantic import BaseModel, Field, Extra, ValidationError, validator
 
-from nomad.app.v1.models import MetadataPagination, Metadata, Pagination
+from nomad.app.v1.models import MetadataPagination, Metadata
 from nomad.app.v1.routers.datasets import DatasetPagination
 from nomad.app.v1.routers.uploads import (
     UploadProcDataQuery,
     UploadProcDataPagination,
     EntryProcDataPagination,
+    RawDirPagination,
 )
 
 
@@ -214,7 +215,7 @@ class RequestConfig(BaseModel):
     )
     pagination: Union[
         dict,
-        Pagination,
+        RawDirPagination,
         DatasetPagination,
         UploadProcDataPagination,
         MetadataPagination,
diff --git a/ops/kubernetes/inspect.yaml b/ops/kubernetes/inspect.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..f50ddd156c2dd75b1a1f305c4310ace71fed5e77
--- /dev/null
+++ b/ops/kubernetes/inspect.yaml
@@ -0,0 +1,17 @@
+apiVersion: v1
+kind: Pod
+metadata:
+  name: pvc-inspector
+  namespace: nomad-prod-develop
+spec:
+  containers:
+  - name: pvc-inspector
+    image: busybox
+    command: ["sh", "-c", "sleep infinity"]
+    volumeMounts:
+    - mountPath: /pvc
+      name: pvc-volume
+  volumes:
+  - name: pvc-volume
+    persistentVolumeClaim:
+      claimName: nomad-prod-develop-north-hub-db-dir
diff --git a/tests/app/v1/routers/test_graph.py b/tests/app/v1/routers/test_graph.py
index a00acd6dffb931af655aa43454b53271b22d686b..e90f618bb076b91238bcb341149a9f6f5f2f2432 100644
--- a/tests/app/v1/routers/test_graph.py
+++ b/tests/app/v1/routers/test_graph.py
@@ -431,10 +431,18 @@ def test_get_uploads_graph(auth_headers, client, example_data, kwargs):
                     'id_unpublished': {
                         'files': {
                             'm_is': 'Directory',
+                            'm_response': {
+                                'depth': 1,
+                                'include': ['*'],
+                                'pagination': {
+                                    'order': 'asc',
+                                    'page': 1,
+                                    'page_size': 10,
+                                    'total': 1,
+                                },
+                            },
                             'test_content': {
                                 'm_is': 'Directory',
-                                'path': 'test_content',
-                                'size': 0,
                             },
                         }
                     }
@@ -449,15 +457,21 @@ def test_get_uploads_graph(auth_headers, client, example_data, kwargs):
                     'id_unpublished': {
                         'files': {
                             'm_is': 'Directory',
+                            'm_response': {
+                                'depth': 2,
+                                'include': ['*'],
+                                'pagination': {
+                                    'order': 'asc',
+                                    'page': 1,
+                                    'page_size': 10,
+                                    'total': 2,
+                                },
+                            },
                             'test_content': {
                                 'id_unpublished_1': {
                                     'm_is': 'Directory',
-                                    'path': 'test_content/id_unpublished_1',
-                                    'size': 0,
                                 },
                                 'm_is': 'Directory',
-                                'path': 'test_content',
-                                'size': 0,
                             },
                         }
                     }
@@ -472,6 +486,16 @@ def test_get_uploads_graph(auth_headers, client, example_data, kwargs):
                     'id_unpublished': {
                         'files': {
                             'm_is': 'Directory',
+                            'm_response': {
+                                'depth': 3,
+                                'include': ['*'],
+                                'pagination': {
+                                    'order': 'asc',
+                                    'page': 1,
+                                    'page_size': 10,
+                                    'total': 7,
+                                },
+                            },
                             'test_content': {
                                 'id_unpublished_1': {
                                     '1.aux': {
@@ -500,12 +524,8 @@ def test_get_uploads_graph(auth_headers, client, example_data, kwargs):
                                         'path': 'test_content/id_unpublished_1/mainfile.json',
                                         'size': 3227,
                                     },
-                                    'path': 'test_content/id_unpublished_1',
-                                    'size': 3259,
                                 },
                                 'm_is': 'Directory',
-                                'path': 'test_content',
-                                'size': 3259,
                             },
                         }
                     }
diff --git a/tests/data/proc/nested.zip b/tests/data/proc/nested.zip
new file mode 100644
index 0000000000000000000000000000000000000000..531aacb5f68763cb76fa3b2c5d281ccfbfc489fa
Binary files /dev/null and b/tests/data/proc/nested.zip differ
diff --git a/tests/graph/test_definition_reader.py b/tests/graph/test_definition_reader.py
index 234e1a70e14b87a6927024c6c238aee3ceed8189..9e1e904e98e22eba03fcc8995216969ae87b2410 100644
--- a/tests/graph/test_definition_reader.py
+++ b/tests/graph/test_definition_reader.py
@@ -306,7 +306,11 @@ def assert_dict(d1, d2):
             id='resolve-with-depth',
         ),
         pytest.param(
-            {'all_quantities': {'m_request': {'directive': 'plain'}}},
+            {
+                'all_quantities': {'m_request': {'directive': 'plain'}},
+                'inherited_sections': {'m_request': {'directive': 'plain'}},
+                'all_base_sections': {'m_request': {'directive': 'plain'}},
+            },
             {
                 'm_def': f'{prefix}/3',
                 'metainfo': {
@@ -319,7 +323,11 @@ def assert_dict(d1, d2):
                                 'all_quantities': {
                                     'base': f'{prefix}/3/quantities/0',
                                     'derived': f'{prefix}/3/quantities/1',
-                                }
+                                },
+                                'inherited_sections': [
+                                    'metainfo/tests.graph.test_definition_reader/section_definitions/3'
+                                ],
+                                'all_base_sections': [],
                             },
                         ]
                     }
diff --git a/tests/graph/test_graph_reader.py b/tests/graph/test_graph_reader.py
index a549744f4edffc5f553990dc1fe917876bab3e45..e38f66e883fd39dd6805a8ea1efed66d82dc4a5c 100644
--- a/tests/graph/test_graph_reader.py
+++ b/tests/graph/test_graph_reader.py
@@ -67,8 +67,8 @@ def assert_list(l1, l2):
 def assert_dict(d1, d2):
     if GeneralReader.__CACHE__ in d1:
         del d1[GeneralReader.__CACHE__]
-    if 'pagination' in d1:
-        del d1['pagination']
+    if 'm_response' in d1:
+        del d1['m_response']
     if 'm_def' in d1:
         del d1['m_def']
     if 'm_def' in d2:
@@ -639,7 +639,6 @@ def test_remote_reference(json_dict, example_data_with_reference, user1):
                     'n_entries': 6,
                     'upload_files_server_path': 'id_published_with_ref',
                     Token.ENTRIES: {
-                        'm_response': {},
                         'id_02': {
                             'entry_id': 'id_02',
                             'mainfile_path': 'mainfile_for_id_02',
@@ -1438,6 +1437,43 @@ def test_remote_reference(json_dict, example_data_with_reference, user1):
             },
         },
     )
+    __entry_print(
+        'plain entry reader, resolve to root',
+        {
+            Token.ARCHIVE: {
+                'metadata': {
+                    'm_request': {'directive': 'plain', 'depth': 1, 'max_list_size': 1},
+                }
+            }
+        },
+        result={
+            'archive': {
+                'metadata': {
+                    'domain': 'dft',
+                    'embargo_length': 0,
+                    'entry_create_time': '2024-05-28T19:14:10.754059+00:00',
+                    'entry_hash': 'dummy_hash_id_03',
+                    'entry_id': 'id_03',
+                    'entry_references': '__INTERNAL__:../uploads/id_published_with_ref/archive/id_03#/metadata/entry_references',
+                    'license': 'CC BY 4.0',
+                    'main_author': '00000000-0000-0000-0000-000000000001',
+                    'mainfile': 'mainfile_for_id_03',
+                    'n_quantities': 66,
+                    'parser_name': 'parsers/vasp',
+                    'processed': True,
+                    'published': False,
+                    'quantities': '__INTERNAL__:../uploads/id_published_with_ref/archive/id_03#/metadata/quantities',
+                    'section_defs': '__INTERNAL__:../uploads/id_published_with_ref/archive/id_03#/metadata/section_defs',
+                    'sections': '__INTERNAL__:../uploads/id_published_with_ref/archive/id_03#/metadata/sections',
+                    'text_search_contents': [],
+                    'upload_create_time': '2024-05-28T19:14:10.749059+00:00',
+                    'upload_id': 'id_published_with_ref',
+                    'upload_name': 'name_published',
+                    'with_embargo': False,
+                }
+            }
+        },
+    )
     if simulationworkflowschema is not None:
         __entry_print(
             'entry reader to definition reader',
@@ -2117,7 +2153,6 @@ def test_general_reader(json_dict, example_data_with_reference, user1):
         },
         result={
             Token.ENTRIES: {
-                'm_response': {},
                 'id_04': {
                     'process_running': False,
                     'current_process': None,
@@ -2171,7 +2206,6 @@ def test_general_reader(json_dict, example_data_with_reference, user1):
         },
         result={
             Token.ENTRIES: {
-                'm_response': {},
                 'id_01': {
                     'process_running': False,
                     'current_process': None,
@@ -2291,7 +2325,6 @@ def test_general_reader(json_dict, example_data_with_reference, user1):
             Token.SEARCH: {
                 'id_03': 'id_03',
                 'id_04': 'id_04',
-                'm_response': {'query': {'aggregations': {}, 'owner': 'user'}},
             }
         },
     )
@@ -2310,7 +2343,6 @@ def test_general_reader(json_dict, example_data_with_reference, user1):
         },
         result={
             Token.UPLOADS: {
-                'm_response': {},
                 'id_published_with_ref': {
                     'process_running': False,
                     'current_process': 'process_upload',
@@ -2359,7 +2391,6 @@ def test_general_reader(json_dict, example_data_with_reference, user1):
         },
         result={
             Token.UPLOADS: {
-                'm_response': {'query': {'is_processing': False}},
                 'id_published_with_ref': {
                     'process_running': False,
                     'current_process': 'process_upload',
@@ -2487,7 +2518,6 @@ def test_general_reader_search(json_dict, example_data_with_reference, user1):
         },
         result={
             'search': {
-                'm_response': {'query': {'aggregations': {}, 'owner': 'public'}},
                 'id_01': {
                     'entries': {
                         'mainfile': {
@@ -2593,7 +2623,12 @@ data:
     data.create_entry(
         upload_id='id_custom', entry_id='id_example', entry_archive=archive
     )
-    data.save(with_files=True, with_es=True, with_mongo=True)
+    data.save(
+        with_files=True,
+        with_es=True,
+        with_mongo=True,
+        additional_files_path='tests/data/proc/nested.zip',
+    )
 
     yield data
 
@@ -2709,6 +2744,132 @@ def test_custom_schema_archive_and_definition(user1, custom_data):
         },
     )
 
+    def __fs_print(msg, required, *, result: dict = None):
+        with FileSystemReader(required, user=user1) as reader:
+            if result:
+                assert_dict(reader.read('id_custom'), result)
+            else:
+                rprint(f'\n\nExample: {next(counter)} -> {msg}:')
+                rprint(required)
+                rprint('output:')
+                rprint(reader.read('id_custom'))
+
+    __fs_print(
+        'one level deep second page',
+        {
+            'm_request': {
+                'directive': 'plain',
+                'depth': 1,
+                'pagination': {'page_size': 10, 'page': 2},
+            },
+        },
+        result={
+            'm_is': 'Directory',
+            'mainfile_for_id_example': {
+                'm_is': 'File',
+                'path': 'mainfile_for_id_example',
+                'size': 3227,
+            },
+        },
+    )
+
+    __fs_print(
+        'two levels',
+        {
+            'm_request': {
+                'directive': 'plain',
+                'depth': 2,
+                'pagination': {'page_size': 10, 'page': 2},
+            },
+        },
+        result={
+            'm_is': 'Directory',
+            '3.aux': {'m_is': 'File', 'path': '3.aux', 'size': 8},
+            '4.aux': {'m_is': 'File', 'path': '4.aux', 'size': 8},
+            'edge_names': {
+                '!"§$%&()=?.txt': {
+                    'm_is': 'File',
+                    'path': 'edge_names/!"§$%&()=?.txt',
+                    'size': 0,
+                },
+                'suuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuper-long.txt': {
+                    'm_is': 'File',
+                    'path': 'edge_names/suuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuper-long.txt',
+                    'size': 0,
+                },
+            },
+            'entry.archive.json': {
+                'm_is': 'File',
+                'path': 'entry.archive.json',
+                'size': 185,
+            },
+            'file.txt': {'m_is': 'File', 'path': 'file.txt', 'size': 0},
+            'mainfile_for_id_example': {
+                'm_is': 'File',
+                'path': 'mainfile_for_id_example',
+                'size': 3227,
+            },
+            'many_files': {
+                'file1.txt': {
+                    'm_is': 'File',
+                    'path': 'many_files/file1.txt',
+                    'size': 0,
+                },
+                'file10.txt': {
+                    'm_is': 'File',
+                    'path': 'many_files/file10.txt',
+                    'size': 0,
+                },
+                'file100.txt': {
+                    'm_is': 'File',
+                    'path': 'many_files/file100.txt',
+                    'size': 0,
+                },
+            },
+        },
+    )
+
+    __fs_print(
+        'different configs',
+        {
+            'm_request': {
+                'directive': 'plain',
+                'depth': 1,
+                'pagination': {'page_size': 2, 'page': 2},
+            },
+            'many_files': {
+                'm_request': {
+                    'directive': 'plain',
+                    'depth': 1,
+                    'pagination': {'page_size': 3, 'page': 2},
+                },
+            },
+        },
+        result={
+            'm_is': 'Directory',
+            'preview': {'m_is': 'Directory'},
+            'subdirs': {'m_is': 'Directory'},
+            'many_files': {
+                'm_is': 'Directory',
+                'file11.txt': {
+                    'm_is': 'File',
+                    'path': 'many_files/file11.txt',
+                    'size': 0,
+                },
+                'file12.txt': {
+                    'm_is': 'File',
+                    'path': 'many_files/file12.txt',
+                    'size': 0,
+                },
+                'file13.txt': {
+                    'm_is': 'File',
+                    'path': 'many_files/file13.txt',
+                    'size': 0,
+                },
+            },
+        },
+    )
+
 
 @pytest.fixture(scope='function')
 def example_data_with_reference(