diff --git a/docs/howto/plugins/apis.md b/docs/howto/plugins/apis.md
new file mode 100644
index 0000000000000000000000000000000000000000..50e28d79e51465c97a0770f282add396a9a54147
--- /dev/null
+++ b/docs/howto/plugins/apis.md
@@ -0,0 +1,96 @@
+# How to write an API
+
+APIs allow you to add more APIs to the NOMAD app. More specifically, you can create
+[FastAPI](https://fastapi.tiangolo.com) apps that can be mounted into the main NOMAD app alongside other APIs
+such as `/api/v1`, `/optimade`, etc.
+
+This documentation shows you how to write a plugin entry point for an API.
+You should read the [documentation on getting started with plugins](./plugins.md)
+to have a basic understanding of how plugins and plugin entry points work in the NOMAD ecosystem.
+
+## Getting started
+
+You can use our [template repository](https://github.com/FAIRmat-NFDI/nomad-plugin-template) to
+create an initial structure for a plugin containing an API.
+The relevant part of the repository layout will look something like this:
+
+```txt
+nomad-example
+   ├── src
+   │   ├── nomad_example
+   │   │   ├── apis
+   │   │   │   ├── __init__.py
+   │   │   │   ├── myapi.py
+   ├── LICENSE.txt
+   ├── README.md
+   └── pyproject.toml
+```
+
+See the documentation on [plugin development guidelines](./plugins.md#plugin-development-guidelines)
+for more details on the best development practices for plugins, including linting, testing and documenting.
+
+## API entry point
+
+The entry point defines basic information about your API and is used to automatically
+load it into a NOMAD distribution. It is an instance of an `APIEntryPoint` or its subclass and it contains a `load` method which returns a `fastapi.FastAPI` app instance.
+Furthermore, it allows you to define a path prefix for your API.
+The entry point should be defined in `*/apis/__init__.py` like this:
+
+```python
+from pydantic import Field
+from nomad.config.models.plugins import APIEntryPoint
+
+
+class MyAPIEntryPoint(APIEntryPoint):
+
+    def load(self):
+        from nomad_example.apis.myapi import app
+
+        return app
+
+
+myapi = MyAPIEntryPoint(
+    prefix='myapi',
+    name='MyAPI',
+    description='My custom API.',
+)
+```
+
+Here you can see that a new subclass of `APIEntryPoint` was defined. In this new class you have to override the `load` method to determine the FastAPI app that makes your API.
+In the reference you can see all of the available [configuration options for an `APIEntryPoint`](../../reference/plugins.md#apientrypoint).
+
+The entry point instance should then be added to the `[project.entry-points.'nomad.plugin']` table in `pyproject.toml` in order for it to be automatically detected:
+
+```toml
+[project.entry-points.'nomad.plugin']
+myapi = "nomad_example.apis:myapi"
+```
+
+## The FastAPI app
+
+The `load`-method of an API entry point has to return an instance of a `fastapi.FastAPI`.
+This app should be implemented in a separate file (e.g. `*/apis/myapi.py`) and could look like this:
+
+```python
+from fastapi import FastAPI
+from nomad.config import config
+
+myapi_entry_point = config.get_plugin_entry_point('nomad_example.apis:myapi')
+
+app = FastAPI(
+    root_path=f'{config.services.api_base}/{myapi_entry_point.prefix}'
+)
+
+@app.get('/')
+async def root():
+    return {"message": "Hello World"}
+```
+
+Read the official [FastAPI documentation](https://fastapi.tiangolo.com/tutorial/) to learn how to build apps and APIs with
+FastAPI.
+
+If you run NOMAD with this plugin following our [Oasis installation documentation](../oasis/install.md) and our [plugin installation documentation](../oasis/plugins_install.md), you can curl this API and should receive the message:
+
+```sh
+curl localhost/nomad-oasis/myapi
+```
diff --git a/docs/reference/plugins.md b/docs/reference/plugins.md
index 1178e2eec3fb6b925f73ba50791d5a9a6fd5413c..5b7324de2491114493f1c15610e4277111e053f2 100644
--- a/docs/reference/plugins.md
+++ b/docs/reference/plugins.md
@@ -15,6 +15,7 @@ This is a list of the available plugin entry point configuration models.
 {{ pydantic_model('nomad.config.models.plugins.NormalizerEntryPoint') }}
 {{ pydantic_model('nomad.config.models.plugins.ParserEntryPoint') }}
 {{ pydantic_model('nomad.config.models.plugins.SchemaPackageEntryPoint') }}
+{{ pydantic_model('nomad.config.models.plugins.APIEntryPoint') }}
 
 ## Default plugin entry points
 
diff --git a/mkdocs.yml b/mkdocs.yml
index 13bc1d39cc34960163f093714320c9c8814bfce9..dd8e8e8603e07591967a3bbf53c10dc8ffe3924f 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -40,6 +40,7 @@ nav:
           - Write a normalizer: howto/plugins/normalizers.md
           - Write a parser: howto/plugins/parsers.md
           - Write a schema package: howto/plugins/schema_packages.md
+          - Write an API: howto/plugins/apis.md
       - Customization:
           - Write a YAML schema package: howto/customization/basics.md
           - Define ELNs: howto/customization/elns.md
diff --git a/nomad/app/main.py b/nomad/app/main.py
index d916e20773b1698f1c20d8dfc2eb2f66608add73..2168e5fd965f7e7bd939d5779e7f707dd2cf7c6f 100644
--- a/nomad/app/main.py
+++ b/nomad/app/main.py
@@ -29,6 +29,7 @@ from starlette.middleware.base import BaseHTTPMiddleware
 
 from nomad import infrastructure
 from nomad.config import config
+from nomad.config.models.plugins import APIEntryPoint
 
 from .v1.main import app as v1_app
 from .static import app as static_files_app, GuiFiles
@@ -96,6 +97,12 @@ if config.resources.enabled:
 
     app.mount(f'{app_base}/resources', resources_app)
 
+# Add API plugins
+for entry_point in config.plugins.entry_points.filtered_values():
+    if isinstance(entry_point, APIEntryPoint):
+        api_app = entry_point.load()
+        app.mount(f'{app_base}/{entry_point.prefix}', api_app)
+
 # Make sure to mount this last, as it is a catch-all routes that are not yet mounted.
 app.mount(app_base, static_files_app)
 
diff --git a/nomad/app/v1/models/graph/__main__.py b/nomad/app/v1/models/graph/__main__.py
deleted file mode 100644
index 30ebdc1e7e9164ea056a8a05482fd5c12c17bda5..0000000000000000000000000000000000000000
--- a/nomad/app/v1/models/graph/__main__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-import sys
-
-module_prefix = 'nomad.app.v1.models.graph'
-
-setattr(sys.modules[f'{module_prefix}.utils'], 'ref_prefix', '#/definitions')
-model = getattr(sys.modules[f'{module_prefix}.graph_models'], sys.argv[1])
-print(model.schema_json(indent=2))
diff --git a/nomad/app/v1/models/graph/graph_models.py b/nomad/app/v1/models/graph/graph_models.py
index ff4b2151c21999bbcf866d2c235e589bbba52142..f9eaac851f050b923dbe983b357fd86d555fa2b0 100644
--- a/nomad/app/v1/models/graph/graph_models.py
+++ b/nomad/app/v1/models/graph/graph_models.py
@@ -184,6 +184,7 @@ class GraphUploads(BaseModel):
 
 class GraphEntryMetadata(BaseModel, extra=Extra.allow):
     entry: GraphEntry
+    m_children: Any
 
 
 class SearchRequestOptions(BaseModel):
diff --git a/nomad/app/v1/models/graph/utils.py b/nomad/app/v1/models/graph/utils.py
index b32a291b08aa81ee806fbaab6fa4687bd607123a..bd26f6fc09925f3f459046da28092cd9c0f3fdba 100644
--- a/nomad/app/v1/models/graph/utils.py
+++ b/nomad/app/v1/models/graph/utils.py
@@ -52,6 +52,7 @@ import sys
 ref_prefix = '#/components/schemas'
 request_suffix = 'Request'
 response_suffix = 'Response'
+graph_model_export = False
 
 
 class _DictModel(BaseModel):
@@ -110,6 +111,14 @@ class _DictModel(BaseModel):
                 if value_type == Literal['*']:
                     types.append({'enum': ['*'], 'type': 'string'})
                 else:
+                    # This forces all model names to be unique. Pydantic
+                    # replaces non-unique model names with qualified names.
+                    # We are just using the plain name here. Unfortunately,
+                    # there is no way to get the "long_model_name" map from pydantic
+                    # to put the right names here. Therefore, in the presence
+                    # of non-unique names, we are using the wrong references.
+                    # Things depending on the openapi schema will cause issues.
+                    # i.e. https://gitlab.mpcdf.mpg.de/nomad-lab/nomad-FAIR/-/issues/1958
                     types.append({'$ref': f'{ref_prefix}/{value_type.__name__}'})
 
             if 'properties' in schema:
@@ -196,6 +205,13 @@ def _generate_model(
 
     # We need to populate a forward ref for the model in the ns use it in recursion cases.
     result_model_name = f'{source_model.__name__}{suffix}'
+    if (
+        graph_model_export
+        and result_model_name.startswith('Graph')
+        and result_model_name not in ['GraphRequest', 'GraphResponse']
+    ):
+        result_model_name = result_model_name[5:]
+
     is_ns_origin = len(ns) == 0
     if result_model_name not in ns:
         ns[result_model_name] = ForwardRef(result_model_name)
@@ -307,6 +323,7 @@ def _generate_model(
     assert (
         getattr(sys.modules[source_model.__module__], result_model_name, result_model)
         == result_model
+        or graph_model_export
     ), f'Model class with name {result_model_name} already exists.'
     setattr(sys.modules[source_model.__module__], result_model_name, result_model)
 
diff --git a/nomad/app/v1/routers/entries.py b/nomad/app/v1/routers/entries.py
index 9480871f931f8440903313d732ae1d45bc54d693..53ebf6d4f2f685c41cfe8b6b19cb59863163978d 100644
--- a/nomad/app/v1/routers/entries.py
+++ b/nomad/app/v1/routers/entries.py
@@ -15,9 +15,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-import math
 from datetime import datetime
 
+from enum import Enum
 from typing import Optional, Set, Union, Dict, Iterator, Any, List
 from fastapi import (
     APIRouter,
@@ -38,20 +38,26 @@ import json
 import orjson
 from pydantic.main import create_model
 from starlette.responses import Response
+import yaml
 
 from nomad import files, utils, metainfo, processing as proc
 from nomad import datamodel
 from nomad.config import config
+from nomad.config.models.config import Reprocess
 from nomad.datamodel import EditableUserMetadata
+from nomad.datamodel.context import ServerContext
 from nomad.files import StreamedFile, create_zipstream
+from nomad.processing.data import Upload
 from nomad.utils import strip
 from nomad.archive import RequiredReader, RequiredValidationError, ArchiveQueryError
+from nomad.groups import get_group_ids
 from nomad.search import (
     AuthenticationRequiredError,
+    QueryValidationError,
     SearchError,
+    search,
     update_metadata as es_update_metadata,
 )
-from nomad.search import search, QueryValidationError
 from nomad.metainfo.elasticsearch_extension import entry_type
 
 from .auth import create_user_dependency
@@ -313,6 +319,33 @@ class EntryMetadataEditResponse(EntryMetadataEdit):
     )
 
 
+class ArchiveChangeAction(Enum):
+    upsert = 'upsert'
+    remove = 'remove'
+
+
+class ArchiveChange(BaseModel):
+    path: str
+    new_value: Any
+    action: ArchiveChangeAction = ArchiveChangeAction.upsert
+
+    class Config:
+        @staticmethod
+        def schema_extra(schema, model) -> None:
+            # Removing the title from the Any typed new_value field
+            # makes this a proper JSON schema "any" instead of a named
+            # empty type.
+            del schema['properties']['new_value']['title']
+
+
+class EntryEdit(BaseModel):
+    changes: List[ArchiveChange]
+
+
+class EntryEditResponse(EntryEdit):
+    entry_id: str
+
+
 _bad_owner_response = (
     status.HTTP_401_UNAUTHORIZED,
     {
@@ -1380,6 +1413,137 @@ def answer_entry_archive_request(
         }
 
 
+@router.post(
+    '/{entry_id}/edit',
+    tags=[raw_tag],
+    summary='Edit a raw mainfile in archive format.',
+    response_model=EntryEditResponse,
+    response_model_exclude_unset=True,
+    response_model_exclude_none=True,
+    responses=create_responses(
+        _bad_id_response, _bad_edit_request, _bad_edit_request_authorization
+    ),
+)
+async def post_entry_edit(
+    data: EntryEdit,
+    entry_id: str = Path(
+        ...,
+        description='The unique entry id of the entry to edit.',
+    ),
+    user: User = Depends(create_user_dependency()),
+):
+    response = perform_search(
+        owner=Owner.all_,
+        query={'entry_id': entry_id},
+        required=MetadataRequired(
+            include=['writers', 'writer_groups', 'mainfile', 'upload_id', 'published']
+        ),
+        user_id=user.user_id if user is not None else None,
+    )
+
+    if response.pagination.total == 0:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail='The entry with the given id does not exist or is not visible to you.',
+        )
+
+    is_admin = user.is_admin
+    entry_data = response.data[0]
+    writers = [writer['user_id'] for writer in entry_data.get('writers', [])]
+    writer_groups = response.data[0].get('writer_groups', [])
+    is_writer = user.user_id in writers or not set(
+        get_group_ids(user.user_id)
+    ).isdisjoint(writer_groups)
+
+    if not (is_admin or is_writer):
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail='Not enough permissions to execute edit request.',
+        )
+
+    if entry_data.get('published', False):
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail='Editing is only allowed for non published entries.',
+        )
+
+    mainfile = entry_data.get('mainfile')
+    upload_id = entry_data.get('upload_id')
+    upload = Upload.get(upload_id)
+    context = ServerContext(upload)
+    archive_data: dict = None
+    with context.raw_file(mainfile, 'rt') as f:
+        if mainfile.endswith('.archive.json'):
+            archive_data = json.load(f)
+        elif mainfile.endswith('.archive.yaml') or mainfile.endswith('.archive.yml'):
+            archive_data = yaml.load(f, Loader=yaml.SafeLoader)
+        else:
+            raise HTTPException(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                detail='The entry mainfile is not in archive format.',
+            )
+
+    def to_key(path_segment: str):
+        try:
+            return int(path_segment)
+        except ValueError:
+            return path_segment
+
+    # TODO this only covers the most basic case
+    #   - no checks yet, we simply assume that the raw file and the changes
+    #     agree on the schema
+    #   - no handling of concurrent changes yet
+    for change in data.changes:
+        path = change.path.split('/')
+        section_data = archive_data
+        next_key = to_key(path[0])
+
+        for path_index, path_segment in enumerate(path[:-1]):
+            # Usually all keys are str and indicate either a quantity or
+            # a single sub-section. If the next segment is an integer, we
+            # know that the current segment is a repeated sub-section.
+            next_key = to_key(path[path_index + 1])
+            key = to_key(path_segment)
+            repeated_sub_section = isinstance(next_key, int)
+
+            next_value: Union[list, dict] = [] if repeated_sub_section else {}
+
+            if isinstance(section_data, list):
+                if section_data[key] is None:
+                    section_data[key] = next_value
+                section_data = section_data[key]
+            else:
+                section_data = section_data.setdefault(key, next_value)
+
+            # If this is a list, we might need to fill some holes before we can
+            # update the value.
+            if isinstance(section_data, list):
+                if len(section_data) <= next_key:
+                    section_data.extend([None] * (next_key - len(section_data) + 1))
+
+        if change.action == ArchiveChangeAction.remove:
+            del section_data[next_key]
+        else:
+            section_data[next_key] = change.new_value
+
+    with context.raw_file(mainfile, 'wt') as f:
+        if mainfile.endswith('.json'):
+            json.dump(archive_data, f)
+        else:
+            yaml.dump(archive_data, f, default_flow_style=False, sort_keys=False)
+
+    reprocess_settings = Reprocess(
+        index_individual_entries=True, reprocess_existing_entries=True
+    )
+    upload.put_file_and_process_local(
+        os.path.join(context.raw_path(), mainfile),
+        os.path.dirname(mainfile),
+        reprocess_settings=reprocess_settings,
+    )
+
+    return {'entry_id': entry_id, 'changes': data.changes}
+
+
 @router.get(
     '/{entry_id}/archive',
     tags=[archive_tag],
diff --git a/nomad/cli/client/__init__.py b/nomad/cli/client/__init__.py
index a82959176e97407e027ef3b80557bbe0c47c3afd..e5928da245ba189e5624a2b68672a9e3ef9e81ee 100644
--- a/nomad/cli/client/__init__.py
+++ b/nomad/cli/client/__init__.py
@@ -62,16 +62,22 @@ def client(
 
 def _create_auth(ctx):
     print(f'Used nomad is {config.client.url}')
-    print(f'Used user is {ctx.obj.user}')
+    print(
+        f'Used user from CLI args is {ctx.obj.user}, using config.client.user {config.client.user}'
+    )
 
     from nomad.client import Auth
 
     if ctx.obj.user is None:
-        return None
-
-    return Auth(
-        user=ctx.obj.user, password=ctx.obj.password, from_api=ctx.obj.token_via_api
-    )
+        return Auth(
+            user=config.client.user,
+            password=config.client.password,
+            from_api=ctx.obj.token_via_api,
+        )
+    else:
+        return Auth(
+            user=ctx.obj.user, password=ctx.obj.password, from_api=ctx.obj.token_via_api
+        )
 
 
 @client.command(help='Runs a few example operations as a test.')
diff --git a/nomad/cli/dev.py b/nomad/cli/dev.py
index 9aaadbbf6108b4872a909fbbb56b49e48ef30fb1..9bf52df4bf4db658be37d55e52d7ebf056d39a70 100644
--- a/nomad/cli/dev.py
+++ b/nomad/cli/dev.py
@@ -78,6 +78,36 @@ def gui_qa(skip_tests: bool):
     sys.exit(ret_code)
 
 
+@dev.command(help='Export an API model in JSON schema.')
+@click.argument('model')
+def api_model(model):
+    import importlib
+
+    if model in [
+        'nomad.app.v1.models.graph.GraphRequest',
+        'nomad.app.v1.models.graph.GraphResponse',
+    ]:
+        from nomad.app.v1.models.graph.utils import (
+            generate_request_model,
+            generate_response_model,
+        )
+        from nomad.app.v1.models.graph.graph_models import Graph
+
+        sys.modules['nomad.app.v1.models.graph.utils'].ref_prefix = '#/definitions'
+        sys.modules['nomad.app.v1.models.graph.utils'].graph_model_export = True
+
+        if model == 'nomad.app.v1.models.graph.GraphRequest':
+            model = generate_request_model(Graph)
+        else:
+            model = generate_response_model(Graph)
+        print(model.schema_json(indent=2))
+    else:
+        pkg, cls = model.rsplit('.', 1)
+        importlib.import_module(pkg)
+        model = getattr(sys.modules[pkg], cls)
+        print(model.schema_json(indent=2))
+
+
 def get_gui_artifacts_js() -> str:
     from nomad.datamodel import all_metainfo_packages
     from nomad.parsing.parsers import code_metadata
diff --git a/nomad/config/models/plugins.py b/nomad/config/models/plugins.py
index 9416446d42edcbcf0c8c8c62737ce1f1f6bd0669..506114afc6e97b2faba9ed6486086202250cd231 100644
--- a/nomad/config/models/plugins.py
+++ b/nomad/config/models/plugins.py
@@ -37,6 +37,7 @@ if TYPE_CHECKING:
     from nomad.metainfo import SchemaPackage
     from nomad.normalizing import Normalizer as NormalizerBaseClass
     from nomad.parsing import Parser as ParserBaseClass
+    from fastapi import FastAPI
 
 
 class EntryPoint(BaseModel):
@@ -299,6 +300,44 @@ class ExampleUploadEntryPoint(EntryPoint):
         )
 
 
+class APIEntryPoint(EntryPoint, metaclass=ABCMeta):
+    """Base model for API plugin entry points."""
+
+    entry_point_type: Literal['api'] = Field(
+        'api', description='Specifies the entry point type.'
+    )
+
+    prefix: str = Field(
+        None,
+        description=(
+            'The prefix for the API. The URL for the API will be the base URL of the NOMAD '
+            'installation followed by this prefix. The prefix must not collide with any other '
+            'API prefixes. There is no default, this field must be set.'
+        ),
+    )
+
+    @root_validator(pre=True)
+    def prefix_must_be_defined_and_valid(cls, v):
+        import urllib.parse
+
+        if 'prefix' not in v:
+            raise ValueError('prefix must be defined')
+        if not v['prefix']:
+            raise ValueError('prefix must be defined')
+        if urllib.parse.quote(v['prefix']) != v['prefix']:
+            raise ValueError('prefix must be a valid URL path')
+
+        v['prefix'] = v['prefix'].strip('/')
+        return v
+
+    @abstractmethod
+    def load(self) -> 'FastAPI':
+        """Used to lazy-load the API instance. You should override this
+        method in your subclass. Note that any Python module imports required
+        for the API should be done within this function as well."""
+        pass
+
+
 class PluginBase(BaseModel):
     """
     Base model for a NOMAD plugin.
@@ -536,6 +575,7 @@ EntryPointType = Union[
     NormalizerEntryPoint,
     AppEntryPoint,
     ExampleUploadEntryPoint,
+    APIEntryPoint,
 ]
 
 
diff --git a/nomad/files.py b/nomad/files.py
index 30a3c334f992151b01501ce1747106f7709dc4ae..7a4a4677b143864f323297bd426c921ae9686cb8 100644
--- a/nomad/files.py
+++ b/nomad/files.py
@@ -1110,7 +1110,12 @@ class StagingUploadFiles(UploadFiles):
                             shutil.move(element_source_path, element_target_path)
                         else:
                             # Copy the file
-                            shutil.copyfile(element_source_path, element_target_path)
+                            try:
+                                shutil.copyfile(
+                                    element_source_path, element_target_path
+                                )
+                            except shutil.SameFileError:
+                                pass
                         if updated_files is not None:
                             updated_files.add(
                                 os.path.join(target_dir, element_relative_path)
diff --git a/nomad/parsing/parser.py b/nomad/parsing/parser.py
index 680826748397e0214787bc9864f228a158b618ae..7e95da9792f2155e97e8bb1a0ed79b2b344c474b 100644
--- a/nomad/parsing/parser.py
+++ b/nomad/parsing/parser.py
@@ -527,9 +527,11 @@ class ArchiveParser(MatchingParser):
 
         if metadata_data is not None:
             self.domain = metadata_data.get('domain')
-            # Setting metadata in this way is not supported (any more)
-            if entry_name := metadata_data.get('entry_name', None):
-                archive.metadata.entry_name = entry_name
+            for quantity_name in ['entry_name', 'references', 'comment']:
+                quantity = EntryMetadata.m_def.all_quantities[quantity_name]
+                if value := metadata_data.get(quantity_name, None):
+                    archive.metadata.m_set(quantity, value)
+
             del archive_data[EntryArchive.metadata.name]
 
         # ensure that definitions are parsed first to make them available for the
diff --git a/tests/app/v1/routers/test_entries_archive_edit.py b/tests/app/v1/routers/test_entries_archive_edit.py
new file mode 100644
index 0000000000000000000000000000000000000000..8a2e90107dbffd9b62c3d1d9677d5627e4ed7559
--- /dev/null
+++ b/tests/app/v1/routers/test_entries_archive_edit.py
@@ -0,0 +1,162 @@
+#
+# Copyright The NOMAD Authors.
+#
+# This file is part of NOMAD. See https://nomad-lab.eu for further info.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import json
+import pytest
+
+from nomad.datamodel.datamodel import EntryArchive, EntryMetadata
+from nomad.datamodel.metainfo.basesections import BaseSection
+from nomad.utils.exampledata import ExampleData
+from tests.test_files import create_test_upload_files
+
+
+@pytest.mark.parametrize(
+    'edit, result, user',
+    [
+        pytest.param(
+            {'changes': [{'path': 'data/name', 'new_value': 'NewName'}]},
+            {'data': {'name': 'NewName'}},
+            'user1',
+            id='quantity',
+        ),
+        pytest.param(
+            {'changes': [{'path': 'data/sub', 'new_value': {'name': 'NewName'}}]},
+            {'data': {'name': 'TestName', 'sub': {'name': 'NewName'}}},
+            'user1',
+            id='sub-section',
+        ),
+        pytest.param(
+            {'changes': [{'path': 'data/sub/0', 'new_value': {'name': 'NewName'}}]},
+            {'data': {'name': 'TestName', 'sub': [{'name': 'NewName'}]}},
+            'user1',
+            id='repeated-sub-section',
+        ),
+        pytest.param(
+            {'changes': [{'path': 'data/sub/name', 'new_value': 'NewName'}]},
+            {'data': {'name': 'TestName', 'sub': {'name': 'NewName'}}},
+            'user1',
+            id='missing-sub-section',
+        ),
+        pytest.param(
+            {'changes': [{'path': 'data/sub/0/name', 'new_value': 'NewName'}]},
+            {'data': {'name': 'TestName', 'sub': [{'name': 'NewName'}]}},
+            'user1',
+            id='missing-repeated-sub-section',
+        ),
+        pytest.param(
+            {'changes': [{'path': 'data/name', 'action': 'remove'}]},
+            {'data': {}},
+            'user1',
+            id='remove-quantity',
+        ),
+        pytest.param(
+            {
+                'changes': [
+                    {'path': 'data/sub/name', 'new_value': 'NewName'},
+                    {'path': 'data/sub', 'action': 'remove'},
+                ]
+            },
+            {
+                'data': {
+                    'name': 'TestName',
+                }
+            },
+            'user1',
+            id='remove-sub-section',
+        ),
+        pytest.param(
+            {
+                'changes': [
+                    {'path': 'data/sub/1/name', 'new_value': 'NewName'},
+                    {'path': 'data/sub/1', 'action': 'remove'},
+                ]
+            },
+            {'data': {'name': 'TestName', 'sub': [None]}},
+            'user1',
+            id='remove-repeated-sub-section',
+        ),
+        pytest.param(
+            {
+                'changes': [
+                    {'path': 'data/sub/0', 'action': 'upsert', 'new_value': {}},
+                    {
+                        'path': 'data/sub/0/name',
+                        'action': 'upsert',
+                        'new_value': 'NewName1',
+                    },
+                    {'path': 'data/sub/1', 'action': 'upsert', 'new_value': {}},
+                    {
+                        'path': 'data/sub/1/name',
+                        'action': 'upsert',
+                        'new_value': 'NewName2',
+                    },
+                ]
+            },
+            {
+                'data': {
+                    'name': 'TestName',
+                    'sub': [{'name': 'NewName1'}, {'name': 'NewName2'}],
+                }
+            },
+            'user1',
+            id='add-multiple-repeated-sub-section',
+        ),
+    ],
+)
+def test_post_entry_edit(
+    edit,
+    result,
+    user,
+    client,
+    auth_headers,
+    users_dict,
+    elastic_function,
+    mongo_function,
+    raw_files_function,
+):
+    mainfile = 'mainfile.archive.json'
+    data = ExampleData(main_author=users_dict[user])
+    data.create_upload(upload_id='upload_id', published=False)
+    data.create_entry(entry_id='entry_id', upload_id='upload_id', mainfile=mainfile)
+    data.save(with_files=False)
+
+    upload_files = create_test_upload_files('upload_id', published=False, archives=[])
+    with upload_files.raw_file(mainfile, 'wt') as f:
+        json.dump(
+            EntryArchive(
+                metadata=EntryMetadata(
+                    entry_id='entry_id',
+                    mainfile=mainfile,
+                ),
+                data=BaseSection(name='TestName'),
+            ).m_to_dict(),
+            f,
+        )
+
+    user_auth = auth_headers[user]
+    url = 'entries/entry_id/edit'
+    response = client.post(url, headers=user_auth, json=edit)
+
+    assert response.status_code == 200, response.text
+    archive_data = None
+    with upload_files.raw_file(mainfile, 'rt') as f:
+        archive_data = json.load(f)
+
+    assert json.dumps(
+        {key: value for key, value in archive_data['data'].items() if key != 'm_def'}
+    ) == json.dumps(result['data'])
diff --git a/tests/app/v1/test_models.py b/tests/app/v1/test_models.py
index 26627f4384247f4b699f26207f6bc7ba233df027..8b86751a837823ce14ca3ac9bdb454c525fafc16 100644
--- a/tests/app/v1/test_models.py
+++ b/tests/app/v1/test_models.py
@@ -122,7 +122,6 @@ def test_module():
                 '*':
                     entry:
                         process_status: '*'
-                    '*': '*'
             """,
             [
                 'users.m_children.me.uploads.m_request.query.is_published:False',
@@ -134,7 +133,6 @@ def test_module():
                 'users.m_children.me.datasets.m_children.*.doi:*',
                 'search.m_children.*.entry.process_status:*',
                 'search.m_children.*.entry:GraphEntryRequest',
-                'search.m_children.*.*:*',
             ],
             None,
             id='ok',
@@ -184,7 +182,6 @@ def test_validation(request_yaml: str, paths: List[str], error_path: str):
         export_kwargs = dict(
             exclude_unset=True, exclude_defaults=False, exclude_none=False
         )
-        print(request.json(indent=2, **export_kwargs))
         for path in paths:
             assert_path(request, path)
 
diff --git a/tests/config/models/test_plugins.py b/tests/config/models/test_plugins.py
index 9c9018245e38649f292d3bdc578bccc000c8a8f8..5bfda6aec624a9d5049fa0ca5e759e178da9e68f 100644
--- a/tests/config/models/test_plugins.py
+++ b/tests/config/models/test_plugins.py
@@ -21,6 +21,7 @@ import pytest
 
 from nomad.config.models.plugins import (
     ExampleUploadEntryPoint,
+    APIEntryPoint,
     example_upload_path_prefix,
 )
 
@@ -117,3 +118,40 @@ def test_example_upload_entry_point_invalid(config, error, monkeypatch):
             entry_point.load()
 
         assert exc_info.match(error)
+
+
+@pytest.mark.parametrize(
+    'config, error, value',
+    [
+        pytest.param({}, 'prefix must be defined', None, id='prefix-must-be-defined'),
+        pytest.param(
+            {'prefix': None},
+            'prefix must be defined',
+            None,
+            id='prefix-must-not-be-none',
+        ),
+        pytest.param(
+            {'prefix': ''}, 'prefix must be defined', None, id='prefix-must-not-empty'
+        ),
+        pytest.param({'prefix': '/foo/bar/'}, None, 'foo/bar', id='prefix-slashes'),
+        pytest.param(
+            {'prefix': 'not_$url& save'},
+            'prefix must be a valid URL path',
+            None,
+            id='prefix-is-valid-url',
+        ),
+    ],
+)
+def test_api_entry_point_invalid(config, error, value):
+    class MyAPIEntryPoint(APIEntryPoint):
+        def load(self):
+            pass
+
+    if error:
+        with pytest.raises(Exception) as exc_info:
+            MyAPIEntryPoint(**config)
+        assert exc_info.match(error)
+
+    if not error:
+        entry_point = MyAPIEntryPoint(**config)
+        assert entry_point.prefix == value