From d07e4424950cd3f82d821ee71a7d85d776dbc2d2 Mon Sep 17 00:00:00 2001
From: David Sikter <david.sikter@physik.hu-berlin.de>
Date: Tue, 28 Sep 2021 15:13:02 +0200
Subject: [PATCH] Rename Dataset.name to Dataset.dataset_name

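Renames the Dataset.name metainfo quantity to Dataset.dataset_name and
updates all usages in the datamodel, the v1 API routers, the processing
code, the GUI, and the tests; the search quantity datasets.name becomes
datasets.dataset_name. The integration tests and the Upload docstring
are also adjusted to the upload_name parameter, and parser_dict gets an
explicit type annotation.

As a rough before/after sketch of the API impact (adapted from the
updated examples/tutorials/api_with_requests.py; the dataset name
'NOMAD webinar' is just the tutorial example used there):

    import requests

    base_url = 'http://nomad-lab.eu/prod/rae/api'

    # before: params={'datasets.name': 'NOMAD webinar', 'per_page': 1}
    response = requests.get(
        base_url + '/repo',
        params={'datasets.dataset_name': 'NOMAD webinar', 'per_page': 1})

    data = response.json()
    upload_id = data['results'][0]['upload_id']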
---
 examples/tutorials/api_with_requests.py       |  4 ++--
 gui/src/components/dataset/DatasetPage.js     |  2 +-
 .../entry/EditUserMetadataDialog.js           |  4 ++--
 gui/src/components/entry/OverviewView.js      |  2 +-
 gui/src/components/entry/OverviewView.spec.js |  2 +-
 gui/src/components/search/SearchContext.js    |  2 +-
 .../search/menus/FilterSubMenuDataset.js      |  2 +-
 .../search/results/SearchResultsEntries.js    |  2 +-
 gui/tests/DFTBulk.js                          |  2 +-
 nomad/app/v1/models.py                        |  2 +-
 nomad/app/v1/routers/datasets.py              | 14 ++++++------
 nomad/app/v1/routers/entries.py               |  6 ++---
 nomad/cli/client/integrationtests.py          |  8 +++----
 nomad/datamodel/datamodel.py                  |  4 ++--
 nomad/parsing/parsers.py                      |  3 ++-
 nomad/processing/data.py                      | 16 +++++++-------
 tests/app/v1/routers/test_datasets.py         | 20 ++++++++---------
 tests/app/v1/routers/test_entries_edit.py     | 22 +++++++++----------
 tests/app/v1/routers/test_uploads.py          |  2 +-
 tests/conftest.py                             |  4 ++--
 tests/processing/test_data.py                 |  2 +-
 tests/test_datamodel.py                       |  4 ++--
 22 files changed, 65 insertions(+), 64 deletions(-)

diff --git a/examples/tutorials/api_with_requests.py b/examples/tutorials/api_with_requests.py
index 9e83e24a4e..aad036fb72 100644
--- a/examples/tutorials/api_with_requests.py
+++ b/examples/tutorials/api_with_requests.py
@@ -9,10 +9,10 @@ import json
 
 base_url = 'http://nomad-lab.eu/prod/rae/api'
 
-# response = requests.get(base_url + '/repo?datasets.name=NOMAD%20webinar')
+# response = requests.get(base_url + '/repo?datasets.dataset_name=NOMAD%20webinar')
 response = requests.get(
     base_url + '/repo',
-    params={'datasets.name': 'NOMAD webinar', 'per_page': 1})
+    params={'datasets.dataset_name': 'NOMAD webinar', 'per_page': 1})
 
 data = response.json()
 upload_id = data['results'][0]['upload_id']
diff --git a/gui/src/components/dataset/DatasetPage.js b/gui/src/components/dataset/DatasetPage.js
index 4ec525969a..c0dea5374f 100644
--- a/gui/src/components/dataset/DatasetPage.js
+++ b/gui/src/components/dataset/DatasetPage.js
@@ -63,7 +63,7 @@ const UserdataPage = React.memo(({match}) => {
     <Search header={
       <div className={styles.header}>
         <Typography variant="h4">
-          {dataset.name || (dataset.isEmpty && 'Empty or non existing dataset') || 'loading ...'}
+          {dataset.dataset_name || (dataset.isEmpty && 'Empty or non existing dataset') || 'loading ...'}
         </Typography>
         <Typography>
           dataset{dataset.doi ? <span>, with DOI <DOI doi={dataset.doi} /></span> : ''}
diff --git a/gui/src/components/entry/EditUserMetadataDialog.js b/gui/src/components/entry/EditUserMetadataDialog.js
index 56b5095635..9ef244beea 100644
--- a/gui/src/components/entry/EditUserMetadataDialog.js
+++ b/gui/src/components/entry/EditUserMetadataDialog.js
@@ -337,7 +337,7 @@ class DatasetInputUnstyled extends React.Component {
     const {api} = this.props
     query = query.toLowerCase()
     return api.getDatasets(query)
-      .then(result => result.results.map(ds => ds.name))
+      .then(result => result.results.map(ds => ds.dataset_name))
       .catch(err => {
         console.error(err)
         return []
@@ -814,7 +814,7 @@ class EditUserMetadataDialogUnstyled extends React.Component {
       shared_with: (example.owners || [])
         .filter(user => user.user_id !== example.uploader.user_id)
         .map(user => user.user_id),
-      datasets: (example.datasets || []).map(ds => ds.name)
+      datasets: (example.datasets || []).map(ds => ds.dataset_name)
     }
   }
 
diff --git a/gui/src/components/entry/OverviewView.js b/gui/src/components/entry/OverviewView.js
index 48177d64c6..db309e4176 100644
--- a/gui/src/components/entry/OverviewView.js
+++ b/gui/src/components/entry/OverviewView.js
@@ -154,7 +154,7 @@ const OverviewView = React.memo(function OverviewView({entryId, ...moreProps}) {
               <div>
                 {entry.datasets.map(ds => (
                   <Typography key={ds.dataset_id}>
-                    <Link component={RouterLink} to={`/dataset/id/${ds.dataset_id}`}>{ds.name}</Link>
+                    <Link component={RouterLink} to={`/dataset/id/${ds.dataset_id}`}>{ds.dataset_name}</Link>
                     {ds.doi ? <span>&nbsp;<DOI style={{display: 'inline'}} parentheses doi={ds.doi}/></span> : ''}
                   </Typography>))}
               </div>}
diff --git a/gui/src/components/entry/OverviewView.spec.js b/gui/src/components/entry/OverviewView.spec.js
index 37fd83af11..7105b43ecb 100644
--- a/gui/src/components/entry/OverviewView.spec.js
+++ b/gui/src/components/entry/OverviewView.spec.js
@@ -103,7 +103,7 @@ test('correctly renders metadata and all properties', async () => {
   expect(within(authors).getByText(entry.authors[0].name)).toBeInTheDocument()
   const datasets = screen.getByTitle('A list of user curated datasets this entry belongs to.')
   expect(within(datasets).getByText('datasets')).toBeInTheDocument()
-  expect(within(datasets).getByText(entry.datasets[0].name)).toBeInTheDocument()
+  expect(within(datasets).getByText(entry.datasets[0].dataset_name)).toBeInTheDocument()
   const mainfile = screen.getByTitle('The path to the mainfile from the root directory of the uploaded files')
   expect(within(mainfile).getByText('mainfile')).toBeInTheDocument()
   expect(within(mainfile).getByText(entry.mainfile)).toBeInTheDocument()
diff --git a/gui/src/components/search/SearchContext.js b/gui/src/components/search/SearchContext.js
index 0ba8cd03f9..8ec1f9c433 100644
--- a/gui/src/components/search/SearchContext.js
+++ b/gui/src/components/search/SearchContext.js
@@ -191,7 +191,7 @@ registerFilter('results.properties.electronic.band_structure_electronic.channel_
 registerFilter('external_db', labelAuthor, listStatConfig, 'terms')
 registerFilter('authors.name', labelAuthor, listStatConfig, 'terms')
 registerFilter('upload_create_time', labelAuthor, InputDateRange, 'min_max', undefined, false)
-registerFilter('datasets.name', labelDataset, listStatConfig)
+registerFilter('datasets.dataset_name', labelDataset, listStatConfig)
 registerFilter('datasets.doi', labelDataset, listStatConfig)
 registerFilter('entry_id', labelIDs, listStatConfig)
 registerFilter('upload_id', labelIDs, listStatConfig)
diff --git a/gui/src/components/search/menus/FilterSubMenuDataset.js b/gui/src/components/search/menus/FilterSubMenuDataset.js
index c9a8e69583..4c81f667d1 100644
--- a/gui/src/components/search/menus/FilterSubMenuDataset.js
+++ b/gui/src/components/search/menus/FilterSubMenuDataset.js
@@ -33,7 +33,7 @@ const FilterSubMenuDataset = React.memo(({
       <Grid item xs={12}>
         <InputText
           label="dataset name"
-          quantity="datasets.name"
+          quantity="datasets.dataset_name"
           visible={visible}
           disableStatistics
         />
diff --git a/gui/src/components/search/results/SearchResultsEntries.js b/gui/src/components/search/results/SearchResultsEntries.js
index e0873c07a3..c75f0e9427 100644
--- a/gui/src/components/search/results/SearchResultsEntries.js
+++ b/gui/src/components/search/results/SearchResultsEntries.js
@@ -66,7 +66,7 @@ const columns = [
     render: entry => {
       const datasets = entry.datasets || []
       if (datasets.length > 0) {
-        return datasets.map(dataset => dataset.name).join(', ')
+        return datasets.map(dataset => dataset.dataset_name).join(', ')
       } else {
         return <i>no datasets</i>
       }
diff --git a/gui/tests/DFTBulk.js b/gui/tests/DFTBulk.js
index 9781334746..616f4adb86 100644
--- a/gui/tests/DFTBulk.js
+++ b/gui/tests/DFTBulk.js
@@ -37,7 +37,7 @@ const common = {
   comment: 'Mocked',
   references: ['doi'],
   authors: [{name: 'Lauri Himanen'}],
-  datasets: [{dataset_id: 'Mock dataset', name: 'Mock dataset'}],
+  datasets: [{dataset_id: 'Mock dataset', dataset_name: 'Mock dataset'}],
   mainfile: 'vasp.xml',
   formula: 'Si2'
 }
diff --git a/nomad/app/v1/models.py b/nomad/app/v1/models.py
index a6347749b7..221962b85b 100644
--- a/nomad/app/v1/models.py
+++ b/nomad/app/v1/models.py
@@ -996,7 +996,7 @@ class Metadata(WithQueryAndPagination):
             },
             'all_datasets': {
                 'terms': {
-                    'quantity': 'datasets.name',
+                    'quantity': 'datasets.dataset_name',
                     'pagination': {
                         'page_size': 100
                     }
diff --git a/nomad/app/v1/routers/datasets.py b/nomad/app/v1/routers/datasets.py
index e4d04261f6..7699c807b0 100644
--- a/nomad/app/v1/routers/datasets.py
+++ b/nomad/app/v1/routers/datasets.py
@@ -115,7 +115,7 @@ class DatasetType(str, enum.Enum):
 
 
 class DatasetCreate(BaseModel):  # type: ignore
-    name: Optional[str] = Field(None, description='The new name for the dataset.')
+    dataset_name: Optional[str] = Field(None, description='The new name for the dataset.')
     dataset_type: Optional[DatasetType] = Field(None)
     query: Optional[Query] = Field(None)
     entries: Optional[List[str]] = Field(None)
@@ -130,7 +130,7 @@ class DatasetCreate(BaseModel):  # type: ignore
 async def get_datasets(
         request: Request,
         dataset_id: str = FastApiQuery(None),
-        name: str = FastApiQuery(None),
+        dataset_name: str = FastApiQuery(None),
         user_id: str = FastApiQuery(None),
         dataset_type: str = FastApiQuery(None),
         doi: str = FastApiQuery(None),
@@ -140,9 +140,9 @@ async def get_datasets(
     Retrieves all datasets that match the given criteria.
     '''
     mongodb_objects = DatasetDefinitionCls.m_def.a_mongo.objects
-    query_params = dict(dataset_id=dataset_id, name=name, user_id=user_id, dataset_type=dataset_type, doi=doi)
+    query_params = dict(dataset_id=dataset_id, dataset_name=dataset_name, user_id=user_id, dataset_type=dataset_type, doi=doi)
     if prefix and prefix != '':
-        query_params.update(name=re.compile('^%s.*' % prefix, re.IGNORECASE))  # type: ignore
+        query_params.update(dataset_name=re.compile('^%s.*' % prefix, re.IGNORECASE))  # type: ignore
     query_params = {k: v for k, v in query_params.items() if v is not None}
 
     mongodb_query = mongodb_objects(**query_params)
@@ -207,7 +207,7 @@ async def post_datasets(
 
     # check if name already exists
     existing_dataset = DatasetDefinitionCls.m_def.a_mongo.objects(
-        user_id=user.user_id, name=create.name).first()
+        user_id=user.user_id, dataset_name=create.dataset_name).first()
     if existing_dataset is not None:
         raise HTTPException(
             status_code=_existing_name_response[0],
@@ -216,7 +216,7 @@ async def post_datasets(
     # create dataset
     dataset = DatasetDefinitionCls(
         dataset_id=create_uuid(),
-        name=create.name,
+        dataset_name=create.dataset_name,
         user_id=user.user_id,
         created=now,
         modified=now,
@@ -360,7 +360,7 @@ async def assign_doi(
             status_code=_bad_user_response[0],
             detail=_bad_user_response[1]['description'])
 
-    doi = DOI.create(title='NOMAD dataset: %s' % dataset.name, user=user)
+    doi = DOI.create(title='NOMAD dataset: %s' % dataset.dataset_name, user=user)
     doi.create_draft()
     doi.make_findable()
 
diff --git a/nomad/app/v1/routers/entries.py b/nomad/app/v1/routers/entries.py
index 10c2916540..e393eb8d48 100644
--- a/nomad/app/v1/routers/entries.py
+++ b/nomad/app/v1/routers/entries.py
@@ -1255,14 +1255,14 @@ async def post_entry_metadata_edit(
                 elif verify_reference == datamodel.Dataset:
                     try:
                         mongo_value = datamodel.Dataset.m_def.a_mongo.get(
-                            user_id=user.user_id, name=action_value).dataset_id
+                            user_id=user.user_id, dataset_name=action_value).dataset_id
                     except KeyError:
                         action.message = 'Dataset does not exist and will be created'
                         mongo_value = None
                         if not verify:
                             dataset = datamodel.Dataset(
                                 dataset_id=utils.create_uuid(), user_id=user.user_id,
-                                name=action_value, created=datetime.utcnow())
+                                dataset_name=action_value, created=datetime.utcnow())
                             dataset.a_mongo.create()
                             mongo_value = dataset.dataset_id
 
@@ -1299,7 +1299,7 @@ async def post_entry_metadata_edit(
                 if doi_ds is not None and not user.is_admin:
                     data.success = False
                     data.message = (data.message if data.message else '') + (
-                        'Edit would remove entries from a dataset with DOI (%s) ' % doi_ds.name)
+                        'Edit would remove entries from a dataset with DOI (%s) ' % doi_ds.dataset_name)
                     has_error = True
 
     # stop here, if client just wants to verify its actions
diff --git a/nomad/cli/client/integrationtests.py b/nomad/cli/client/integrationtests.py
index e4c417ffae..a50a08a6a8 100644
--- a/nomad/cli/client/integrationtests.py
+++ b/nomad/cli/client/integrationtests.py
@@ -52,7 +52,7 @@ def integrationtests(auth: api.Auth, skip_parsers: bool, skip_publish: bool, ski
 
         return upload
 
-    response = api.get('uploads', params=dict(name='integration_test_upload'), auth=auth)
+    response = api.get('uploads', params=dict(upload_name='integration_test_upload'), auth=auth)
     assert response.status_code == 200, response.text
     uploads = response.json()['data']
     assert len(uploads) == 0, 'the test upload must not exist before'
@@ -64,7 +64,7 @@ def integrationtests(auth: api.Auth, skip_parsers: bool, skip_publish: bool, ski
         command += ' -k'
         code = os.system(command)
         assert code == 0, 'curl command must be successful'
-        response = api.get('uploads', params=dict(name='integration_test_upload'), auth=auth)
+        response = api.get('uploads', params=dict(upload_name='integration_test_upload'), auth=auth)
         assert response.status_code == 200, response.text
         response_json = response.json()
         assert len(response_json['data']) == 1, 'exactly one test upload must be on the server'
@@ -82,7 +82,7 @@ def integrationtests(auth: api.Auth, skip_parsers: bool, skip_publish: bool, ski
     print('upload simple data with API')
     with open(simple_example_file, 'rb') as f:
         response = api.post(
-            'uploads', files=dict(file=f), params=dict(name='integration_test_upload'),
+            'uploads', files=dict(file=f), params=dict(upload_name='integration_test_upload'),
             auth=auth, headers={'Accept': 'application/json'})
         assert response.status_code == 200, response.text
         upload = response.json()['data']
@@ -187,7 +187,7 @@ def integrationtests(auth: api.Auth, skip_parsers: bool, skip_publish: bool, ski
         assert response.status_code == 200, response.text
 
         print('list datasets')
-        response = api.get('datasets', auth=auth, params=dict(name=dataset))
+        response = api.get('datasets', auth=auth, params=dict(dataset_name=dataset))
         assert response.status_code == 200, response.text
         response_json = response.json()
         assert len(response_json['data']) == 1, response.text
diff --git a/nomad/datamodel/datamodel.py b/nomad/datamodel/datamodel.py
index cda439e957..32cbbf2c55 100644
--- a/nomad/datamodel/datamodel.py
+++ b/nomad/datamodel/datamodel.py
@@ -201,7 +201,7 @@ class Dataset(metainfo.MSection):
     Args:
         dataset_id: The unique identifier for this dataset as a string. It should be
             a randomly generated UUID, similar to other nomad ids.
-        name: The human readable name of the dataset as string. The dataset name must be
+        dataset_name: The human-readable name of the dataset as a string. The dataset name must be
             unique for the user.
         user_id: The unique user_id of the owner and creator of this dataset. The owner
             must not change after creation.
@@ -226,7 +226,7 @@ class Dataset(metainfo.MSection):
         type=str,
         a_mongo=Mongo(primary_key=True),
         a_elasticsearch=Elasticsearch(material_entry_type))
-    name = metainfo.Quantity(
+    dataset_name = metainfo.Quantity(
         type=str,
         a_mongo=Mongo(index=True),
         a_elasticsearch=[
diff --git a/nomad/parsing/parsers.py b/nomad/parsing/parsers.py
index 68ff37d6a1..b0f47d9372 100644
--- a/nomad/parsing/parsers.py
+++ b/nomad/parsing/parsers.py
@@ -17,6 +17,7 @@
 #
 
 import os.path
+from typing import Dict
 
 from nomad import config
 from nomad.datamodel import results
@@ -258,7 +259,7 @@ parsers.append(BrokenParser())
 
 ''' Instantiation and constructor based config of all parsers. '''
 
-parser_dict = {parser.name: parser for parser in parsers + empty_parsers}  # type: ignore
+parser_dict: Dict[str, Parser] = {parser.name: parser for parser in parsers + empty_parsers}  # type: ignore
 ''' A dict to access parsers by name. Usually 'parsers/<...>', e.g. 'parsers/vasp'. '''
 
 # renamed parsers
diff --git a/nomad/processing/data.py b/nomad/processing/data.py
index a8d3b56770..4d401f0986 100644
--- a/nomad/processing/data.py
+++ b/nomad/processing/data.py
@@ -752,7 +752,7 @@ class Upload(Proc):
     and processing state.
 
     Attributes:
-        name: Optional user provided upload name.
+        upload_name: Optional user provided upload name.
         upload_path: The fs path were the uploaded files was stored during upload.
         temporary: True if the uploaded file should be removed after extraction.
 
@@ -853,7 +853,7 @@ class Upload(Proc):
     @classmethod
     def create(cls, **kwargs) -> 'Upload':
         '''
-        Creates a new upload for the given user, a user given name is optional.
+        Creates a new upload for the given user; a user-given upload_name is optional.
         It will populate the record with a signed url and pending :class:`UploadProc`.
         The upload will be already saved to the database.
 
@@ -1082,7 +1082,7 @@ class Upload(Proc):
                 for oasis_dataset in oasis_datasets.values():
                     try:
                         existing_dataset = datamodel.Dataset.m_def.a_mongo.get(
-                            user_id=self.user_id, name=oasis_dataset['name'])
+                            user_id=self.user_id, dataset_name=oasis_dataset['dataset_name'])
                     except KeyError:
                         datamodel.Dataset(**oasis_dataset).a_mongo.save()
                     else:
@@ -1726,7 +1726,7 @@ class Upload(Proc):
             required_keys_entry_metadata = (
                 'calc_hash',)
             required_keys_datasets = (
-                'dataset_id', 'name', 'user_id')
+                'dataset_id', 'dataset_name', 'user_id')
 
             keys_exist(bundle_info, required_keys_root_level, 'Missing key in bundle_info.json: {key}')
 
@@ -1792,12 +1792,12 @@ class Upload(Proc):
                     check_user_ids([dataset_dict['user_id']], 'Invalid dataset creator id: {id}')
                     dataset_id = dataset_dict['dataset_id']
                     try:
-                        existing_dataset = datamodel.Dataset.m_def.a_mongo.get(name=dataset_dict['name'])
-                        # Dataset by the given name already exists
+                        existing_dataset = datamodel.Dataset.m_def.a_mongo.get(dataset_name=dataset_dict['dataset_name'])
+                        # Dataset by the given dataset_name already exists
                         assert existing_dataset.user_id == dataset_dict['user_id'], (
-                            'A dataset with the same name but different creator exists')
+                            'A dataset with the same dataset_name but different creator exists')
                         dataset_id_mapping[dataset_id] = existing_dataset.dataset_id
-                        # Note, it may be that a dataset with the same name and creator
+                        # Note, it may be that a dataset with the same dataset_name and creator
                         # is created in both environments. In that case, we consider them
                         # to be the "same" dataset, even if they do not have the same dataset_id.
                         # Thus, in that case the dataset id needs to be translated.
diff --git a/tests/app/v1/routers/test_datasets.py b/tests/app/v1/routers/test_datasets.py
index 4dd30bf07f..9873612b56 100644
--- a/tests/app/v1/routers/test_datasets.py
+++ b/tests/app/v1/routers/test_datasets.py
@@ -59,12 +59,12 @@ def data(elastic, raw_files, mongo, test_user, other_test_user):
             create_dataset(
                 dataset_id='dataset_1',
                 user_id=test_user.user_id,
-                name='test dataset 1',
+                dataset_name='test dataset 1',
                 dataset_type='owned'),
             create_dataset(
                 dataset_id='dataset_2',
                 user_id=test_user.user_id,
-                name='test dataset 2',
+                dataset_name='test dataset 2',
                 dataset_type='owned')
         ])
 
@@ -76,12 +76,12 @@ def data(elastic, raw_files, mongo, test_user, other_test_user):
             create_dataset(
                 dataset_id='dataset_listed',
                 user_id=test_user.user_id,
-                name='foreign test dataset',
+                dataset_name='foreign test dataset',
                 dataset_type='foreign'),
             create_dataset(
                 dataset_id='dataset_doi',
                 user_id=test_user.user_id,
-                name='foreign test dataset',
+                dataset_name='foreign test dataset',
                 dataset_type='foreign',
                 doi='test_doi')
         ])
@@ -115,7 +115,7 @@ def assert_pagination(pagination):
 def assert_dataset(dataset, query: Query = None, entries: List[str] = None, n_entries: int = -1, **kwargs):
     for key, value in kwargs.items():
         if key == 'prefix':
-            assert dataset['name'].startswith(value)
+            assert dataset['dataset_name'].startswith(value)
         else:
             assert dataset[key] == value
 
@@ -168,7 +168,7 @@ def assert_dataset_deleted(dataset_id):
 @pytest.mark.parametrize('query, size, status_code', [
     pytest.param({}, 4, 200, id='empty'),
     pytest.param({'dataset_id': 'dataset_1'}, 1, 200, id='id'),
-    pytest.param({'name': 'test dataset 1'}, 1, 200, id='name'),
+    pytest.param({'dataset_name': 'test dataset 1'}, 1, 200, id='dataset_name'),
     pytest.param({'prefix': 'test dat'}, 2, 200, id='prefix'),
     pytest.param({'dataset_type': 'foreign'}, 2, 200, id='type'),
     pytest.param({'doi': 'test_doi'}, 1, 200, id='doi'),
@@ -205,7 +205,7 @@ def test_dataset(client, data, dataset_id, result, status_code):
     assert_dataset(response.json()['data'], **result)
 
 
-@pytest.mark.parametrize('name, dataset_type, query, entries, user, status_code', [
+@pytest.mark.parametrize('dataset_name, dataset_type, query, entries, user, status_code', [
     pytest.param('another test dataset', 'foreign', None, None, 'test_user', 200, id='plain'),
     pytest.param('another test dataset', 'foreign', None, None, None, 401, id='no-user'),
     pytest.param('test dataset 1', 'foreign', None, None, 'test_user', 400, id='exists'),
@@ -219,8 +219,8 @@ def test_dataset(client, data, dataset_id, result, status_code):
 ])
 def test_post_datasets(
         client, data, example_entries, test_user, test_user_auth, other_test_user,
-        other_test_user_auth, name, dataset_type, query, entries, user, status_code):
-    dataset = {'name': name, 'dataset_type': dataset_type}
+        other_test_user_auth, dataset_name, dataset_type, query, entries, user, status_code):
+    dataset = {'dataset_name': dataset_name, 'dataset_type': dataset_type}
     if query is not None:
         dataset['query'] = query
     if entries is not None:
@@ -243,7 +243,7 @@ def test_post_datasets(
     dataset = json_response['data']
     assert_dataset(
         dataset, query=query, entries=entries,
-        user_id=user.user_id, name=name, dataset_type=dataset_type)
+        user_id=user.user_id, dataset_name=dataset_name, dataset_type=dataset_type)
     assert Dataset.m_def.a_mongo.objects().count() == 5
 
 
diff --git a/tests/app/v1/routers/test_entries_edit.py b/tests/app/v1/routers/test_entries_edit.py
index 191238bade..03fb9764bc 100644
--- a/tests/app/v1/routers/test_entries_edit.py
+++ b/tests/app/v1/routers/test_entries_edit.py
@@ -41,11 +41,11 @@ class TestEditRepo():
     @pytest.fixture(autouse=True)
     def example_datasets(self, test_user, other_test_user, mongo):
         self.example_dataset = Dataset(
-            dataset_id='example_ds', name='example_ds', user_id=test_user.user_id)
+            dataset_id='example_ds', dataset_name='example_ds', user_id=test_user.user_id)
         self.example_dataset.a_mongo.create()
 
         self.other_example_dataset = Dataset(
-            dataset_id='other_example_ds', name='other_example_ds',
+            dataset_id='other_example_ds', dataset_name='other_example_ds',
             user_id=other_test_user.user_id)
         self.other_example_dataset.a_mongo.create()
 
@@ -241,27 +241,27 @@ class TestEditRepo():
 
     def test_edit_ds(self):
         rv = self.perform_edit(
-            datasets=[self.example_dataset.name], query=self.query('upload_1'))
+            datasets=[self.example_dataset.dataset_name], query=self.query('upload_1'))
         self.assert_edit(rv, quantity='datasets', success=True, message=False)
         assert self.mongo(1, datasets=[self.example_dataset.dataset_id])
 
     def test_edit_ds_remove_doi(self):
         rv = self.perform_edit(
-            datasets=[self.example_dataset.name], query=self.query('upload_1'))
+            datasets=[self.example_dataset.dataset_name], query=self.query('upload_1'))
 
         assert rv.status_code == 200
-        rv = self.api.post('datasets/%s/action/doi' % self.example_dataset.name, headers=self.test_user_auth)
+        rv = self.api.post('datasets/%s/action/doi' % self.example_dataset.dataset_name, headers=self.test_user_auth)
         assert rv.status_code == 200
         rv = self.perform_edit(datasets=[], query=self.query('upload_1'))
         assert rv.status_code == 400
         data = rv.json()
         assert not data['success']
-        assert self.example_dataset.name in data['message']
+        assert self.example_dataset.dataset_name in data['message']
         assert Dataset.m_def.a_mongo.get(dataset_id=self.example_dataset.dataset_id) is not None
 
     def test_edit_ds_remove(self):
         rv = self.perform_edit(
-            datasets=[self.example_dataset.name], query=self.query('upload_1'))
+            datasets=[self.example_dataset.dataset_name], query=self.query('upload_1'))
         assert rv.status_code == 200
         rv = self.perform_edit(datasets=[], query=self.query('upload_1'))
         assert rv.status_code == 200
@@ -270,14 +270,14 @@ class TestEditRepo():
 
     def test_edit_ds_user_namespace(self, test_user):
         assert Dataset.m_def.a_mongo.objects(
-            name=self.other_example_dataset.name).first() is not None
+            dataset_name=self.other_example_dataset.dataset_name).first() is not None
 
         rv = self.perform_edit(
-            datasets=[self.other_example_dataset.name], query=self.query('upload_1'))
+            datasets=[self.other_example_dataset.dataset_name], query=self.query('upload_1'))
 
         self.assert_edit(rv, quantity='datasets', success=True, message=True)
         new_dataset = Dataset.m_def.a_mongo.objects(
-            name=self.other_example_dataset.name,
+            dataset_name=self.other_example_dataset.dataset_name,
             user_id=test_user.user_id).first()
         assert new_dataset is not None
         assert self.mongo(1, datasets=[new_dataset.dataset_id])
@@ -285,7 +285,7 @@ class TestEditRepo():
     def test_edit_new_ds(self, test_user):
         rv = self.perform_edit(datasets=['new_dataset'], query=self.query('upload_1'))
         self.assert_edit(rv, quantity='datasets', success=True, message=True)
-        new_dataset = Dataset.m_def.a_mongo.objects(name='new_dataset').first()
+        new_dataset = Dataset.m_def.a_mongo.objects(dataset_name='new_dataset').first()
         assert new_dataset is not None
         assert new_dataset.user_id == test_user.user_id
         assert self.mongo(1, datasets=[new_dataset.dataset_id])
diff --git a/tests/app/v1/routers/test_uploads.py b/tests/app/v1/routers/test_uploads.py
index 9b9f8fa0a8..d1e7f570c9 100644
--- a/tests/app/v1/routers/test_uploads.py
+++ b/tests/app/v1/routers/test_uploads.py
@@ -1260,7 +1260,7 @@ def test_post_upload_action_publish_to_central_nomad(
     # create a dataset to also test this aspect of oasis uploads
     calc = old_upload.calcs[0]
     datamodel.Dataset(
-        dataset_id='dataset_id', name='dataset_name',
+        dataset_id='dataset_id', dataset_name='dataset_name',
         user_id=test_users_dict[user].user_id).a_mongo.save()
     calc.metadata['datasets'] = ['dataset_id']
     calc.save()
diff --git a/tests/conftest.py b/tests/conftest.py
index 755e1b4868..a84fbf3947 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -622,12 +622,12 @@ def oasis_example_upload(non_empty_example_upload: str, test_user, raw_files) ->
             'dataset_1_name': {
                 'dataset_id': 'oasis_dataset_1',
                 'user_id': test_user.user_id,
-                'name': 'dataset_1_name'
+                'dataset_name': 'dataset_1_name'
             },
             'dataset_2_name': {
                 'dataset_id': 'oasis_dataset_2',
                 'user_id': test_user.user_id,
-                'name': 'dataset_2_name'
+                'dataset_name': 'dataset_2_name'
             }
         }
     }
diff --git a/tests/processing/test_data.py b/tests/processing/test_data.py
index 68d74d9d60..dcb21cf367 100644
--- a/tests/processing/test_data.py
+++ b/tests/processing/test_data.py
@@ -255,7 +255,7 @@ def test_oasis_upload_processing(proc_infra, oasis_example_uploaded: Tuple[str,
     # create a dataset to force dataset joining of one of the datasets in the example
     # upload
     datamodel.Dataset(
-        dataset_id='cn_dataset_2', name='dataset_2_name',
+        dataset_id='cn_dataset_2', dataset_name='dataset_2_name',
         user_id=test_user.user_id).a_mongo.save()
 
     upload = Upload.create(
diff --git a/tests/test_datamodel.py b/tests/test_datamodel.py
index 7babc32102..b3303d7b58 100644
--- a/tests/test_datamodel.py
+++ b/tests/test_datamodel.py
@@ -54,11 +54,11 @@ def _gen_user():
 
 
 def _gen_dataset():
-    id, name = random.choice(datasets)
+    id, dataset_name = random.choice(datasets)
     id_str = str(id)
     if datamodel.Dataset.m_def.a_mongo.objects(dataset_id=id_str).first() is None:
         datamodel.Dataset(
-            user_id=random.choice(users), dataset_id=id_str, name=name,
+            user_id=random.choice(users), dataset_id=id_str, dataset_name=dataset_name,
             doi=_gen_ref().value).a_mongo.create()
     return id_str
 
-- 
GitLab