diff --git a/nomad/app/v1/routers/entries.py b/nomad/app/v1/routers/entries.py
index 69834042b44b59e44a43c0b0428c1baf4dd71273..8e9d07ee6af9510ed74ca58619517c4cf4bd157c 100644
--- a/nomad/app/v1/routers/entries.py
+++ b/nomad/app/v1/routers/entries.py
@@ -1126,12 +1126,10 @@ def edit(query: Query, user: User, mongo_update: Dict[str, Any] = None, re_index
     with utils.timer(logger, 'edit elastic update executed', size=len(entry_ids)):
         if re_index:
             updated_metadata: List[datamodel.EntryMetadata] = []
-            # TODO: Quick and dirty, should remove
-            updated_entry_fields = {k: v for k, v in mongo_update.items() if not k.startswith('metadata__')}
             for calc in proc.Calc.objects(calc_id__in=entry_ids):
                 entry_metadata = calc.mongo_metadata(calc.upload)
                 # Ensure that updated fields are marked as "set", even if they are cleared
-                entry_metadata.m_update_from_dict(updated_entry_fields)
+                entry_metadata.m_update_from_dict(mongo_update)
                 # Add to list
                 updated_metadata.append(entry_metadata)
 
@@ -1220,11 +1218,7 @@ async def post_entry_metadata_edit(
             verify_reference = None
             if isinstance(quantity.type, metainfo.Reference):
                 verify_reference = quantity.type.target_section_def.section_cls
-            # TODO: quick and dirty, we are anyway rewriting this soon
-            if quantity.categories and datamodel.MongoMetadata.m_def in quantity.categories:
-                mongo_key = 'metadata__%s' % quantity.name
-            else:
-                mongo_key = quantity.name
+            mongo_key = quantity.name
             has_error = False
             for action in quantity_actions:
                 action.success = True
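
With the MongoMetadata category removed, an edit action's quantity name is the Mongo field name itself, so no 'metadata__%s' prefixing is needed. A minimal sketch of the resulting flow, not part of the patch, with quantity_name and value standing in for a single validated edit action:

    from nomad import processing as proc

    def apply_edit_action(entry_ids, quantity_name, value):
        # The quantity name maps 1:1 onto a top-level field of the Calc
        # document; mongoengine treats plain keyword arguments as $set.
        proc.Calc.objects(calc_id__in=entry_ids).update(**{quantity_name: value})
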
diff --git a/nomad/datamodel/__init__.py b/nomad/datamodel/__init__.py
index 7c36b85df1825f57f9c51f89b7984afaecc157ef..2016d4fda2c360b898b7798212092189420bb4b7 100644
--- a/nomad/datamodel/__init__.py
+++ b/nomad/datamodel/__init__.py
@@ -54,11 +54,6 @@ The class :class:`Dataset` is used to represent datasets and their attributes.
 .. autoclass:: nomad.datamodel.Dataset
     :members:
 
-The class :class:`MongoMetadata` is used to tag metadata stored in mongodb.
-
-.. autoclass:: nomad.datamodel.MongoMetadata
-    :members:
-
 The class :class:`EntryMetadata` is used to represent all metadata about an entry.
 
 .. autoclass:: nomad.datamodel.EntryMetadata
@@ -83,7 +78,7 @@ from nomad.metainfo import Environment
 
 from .datamodel import (
     Dataset, User, Author, EditableUserMetadata, UserProvidableMetadata, OasisMetadata,
-    UploadMetadata, MongoUploadMetadata, MongoEntryMetadata, MongoSystemMetadata, MongoMetadata,
+    UploadMetadata, MongoUploadMetadata, MongoEntryMetadata, MongoSystemMetadata,
     EntryMetadata, EntryArchive)
 from .optimade import OptimadeEntry, Species
 from .metainfo import m_env
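
Downstream code that used the removed MongoMetadata export to enumerate mongo-backed quantities can derive the same information from the three categories that remain exported. A migration sketch for a hypothetical caller, following the same pattern the patch uses in nomad/processing/data.py:

    from nomad.datamodel import (
        MongoUploadMetadata, MongoEntryMetadata, MongoSystemMetadata)

    # The names of all quantities stored as dedicated top-level mongo fields,
    # mirroring the _mongo_*_metadata tuples in nomad/processing/data.py.
    mongo_quantity_names = {
        quantity.name
        for category in (MongoUploadMetadata, MongoEntryMetadata, MongoSystemMetadata)
        for quantity in category.m_def.definitions
    }
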
diff --git a/nomad/datamodel/datamodel.py b/nomad/datamodel/datamodel.py
index 52858ded2d88340bfd9f84de4e64ff60d71bffc5..e9f102b68ed564e9ffd174b9279329005a4dd7ca 100644
--- a/nomad/datamodel/datamodel.py
+++ b/nomad/datamodel/datamodel.py
@@ -311,14 +311,6 @@ class MongoSystemMetadata(metainfo.MCategory):
     pass
 
 
-class MongoMetadata(metainfo.MCategory):
-    '''
-    NOMAD entry quantities that are stored in mongodb on the entry level, in the metadata dict,
-    and not necessarely in the archive.
-    '''
-    pass
-
-
 class DomainMetadata(metainfo.MCategory):
     ''' NOMAD entry quantities that are determined by the uploaded data. '''
     pass
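
The removed class carried no behaviour of its own; like the surviving Mongo* categories, it only tagged quantities. An illustrative sketch of the tagging mechanism, with comment standing in for any entry-level quantity:

    from nomad.metainfo import MCategory, MSection, Quantity

    class MongoEntryMetadata(MCategory):
        ''' Quantities stored as dedicated top-level fields of the entry's mongo document. '''

    class EntryMetadata(MSection):
        # Listing the category is what makes a quantity appear in the tuples
        # derived from m_def.definitions in nomad/processing/data.py.
        comment = Quantity(type=str, categories=[MongoEntryMetadata])
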
diff --git a/nomad/processing/data.py b/nomad/processing/data.py
index 4fccacbc35f99ac519765b7f59faf64f22d35cff..b1e4ddd671885b3f3dd521e40ec5d1928e1dc6ee 100644
--- a/nomad/processing/data.py
+++ b/nomad/processing/data.py
@@ -62,9 +62,6 @@ section_workflow = datamodel.EntryArchive.workflow.name
 section_results = datamodel.EntryArchive.results.name
 
 
-_old_mongo_metadata = tuple(
-    quantity.name for quantity in datamodel.MongoMetadata.m_def.definitions)
-
 _mongo_upload_metadata = tuple(
     quantity.name for quantity in MongoUploadMetadata.m_def.definitions)
 _mongo_entry_metadata = tuple(
@@ -174,8 +171,6 @@ class Calc(Proc):
         references: user provided references (URLs) for this entry
         coauthors: a user provided list of co-authors
         datasets: a list of user curated datasets this entry belongs to
-
-        metadata: the metadata record wit calc and user metadata, see :class:`EntryMetadata`
     '''
     upload_id = StringField()
     calc_id = StringField(primary_key=True)
@@ -196,8 +191,6 @@ class Calc(Proc):
     shared_with = ListField(StringField(), default=None)
     datasets = ListField(StringField(), default=None)
 
-    metadata = DictField()  # Stores user provided metadata and system metadata (not archive metadata)
-
     meta: Any = {
         'strict': False,
         'indexes': [
@@ -251,7 +244,7 @@ class Calc(Proc):
     def _initialize_metadata_for_processing(self):
         '''
         Initializes self._entry_metadata and self._parser_results in preparation for processing.
-        Existing values in self.metadata are loaded first, then generated system values are
+        Existing values in mongo are loaded first, then generated system values are
         applied.
         '''
         self._entry_metadata = EntryMetadata()
@@ -333,7 +326,6 @@ class Calc(Proc):
         and applies the values to `entry_metadata`.
         '''
         assert upload.upload_id == self.upload_id, 'Could not apply metadata: upload_id mismatch'
-        entry_metadata.m_update_from_dict(self.metadata)  # TODO: Flatten?
         # Upload metadata
         for field in _mongo_upload_metadata:
             setattr(entry_metadata, field, getattr(upload, field))
@@ -357,10 +349,6 @@ class Calc(Proc):
         for field in _mongo_entry_metadata_except_system_fields:
             setattr(self, field, entry_metadata_dict.get(field))
 
-        self.metadata = entry_metadata.m_to_dict(
-            include_defaults=True,
-            categories=[datamodel.MongoMetadata])  # TODO use embedded doc? Flatten?
-
     def set_mongo_entry_metadata(self, *args, **kwargs):
         '''
         Sets the entry level metadata in mongo. Expects either a positional argument
@@ -375,8 +363,6 @@ class Calc(Proc):
             for key, value in kwargs.items():
                 if key in _mongo_entry_metadata_except_system_fields:
                     setattr(self, key, value)
-                elif key in _old_mongo_metadata:
-                    self.metadata[key] = value
                 else:
                     assert False, f'Cannot set metadata field: {key}'
 
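
With the DictField gone, set_mongo_entry_metadata accepts only the dedicated fields and asserts on anything else. A usage sketch with made-up values; references and coauthors are fields listed in the Calc docstring above:

    calc.set_mongo_entry_metadata(
        references=['https://example.org/paper'],
        coauthors=['<some-user-id>'])

    # Round trip: mongo_metadata() reads the same dedicated fields back into
    # an EntryMetadata section, with no metadata dict involved.
    entry_metadata = calc.mongo_metadata(calc.upload)
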
@@ -1365,7 +1351,7 @@ class Upload(Proc):
                 with utils.timer(logger, 'calcs processing called'):
                     # process call calcs
                     Calc.process_all(
-                        Calc.process_calc, dict(upload_id=self.upload_id), exclude=['metadata'],
+                        Calc.process_calc, dict(upload_id=self.upload_id),
                         process_kwargs=dict(reprocess_settings=settings))
                     logger.info('completed to trigger process of all calcs')
 
@@ -1764,7 +1750,7 @@ class Upload(Proc):
                 'upload.embargo_length',
                 'entries')
             required_keys_entry_level = (
-                '_id', 'upload_id', 'mainfile', 'parser_name', 'process_status', 'entry_create_time', 'metadata')
+                '_id', 'upload_id', 'mainfile', 'parser_name', 'process_status', 'entry_create_time')
             required_keys_datasets = (
                 'dataset_id', 'dataset_name', 'user_id')
 
@@ -1864,7 +1850,7 @@ class Upload(Proc):
                 # Instantiate an entry object from the json, and validate it
                 entry_keys_to_copy = list(_mongo_entry_metadata)
                 entry_keys_to_copy.extend((
-                    'upload_id', 'metadata', 'errors', 'warnings',
+                    'upload_id', 'errors', 'warnings',
                     'last_status_message', 'current_process', 'current_process_step',
                     'complete_time', 'worker_hostname', 'celery_task_id'))
                 try:
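
One consequence for upload bundles, sketched with placeholder values: an entry record in an exported bundle no longer needs (or gets) a 'metadata' key, since all entry metadata travels in the dedicated top-level keys:

    entry_record = {
        '_id': '<calc_id>',
        'upload_id': '<upload_id>',
        'mainfile': 'vasp.xml',
        'parser_name': 'parsers/vasp',
        'process_status': 'SUCCESS',
        'entry_create_time': '2021-01-01T00:00:00',
        # ...plus the _mongo_entry_metadata fields, e.g. references, coauthors.
    }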