diff --git a/gui/src/components/entry/EditUserMetadataDialog.js b/gui/src/components/entry/EditUserMetadataDialog.js
index f43938fcd0e1618e1421e5a14169b7d6cffbb507..31ccda4b20502343deb88291584c7b9296909a83 100644
--- a/gui/src/components/entry/EditUserMetadataDialog.js
+++ b/gui/src/components/entry/EditUserMetadataDialog.js
@@ -777,7 +777,7 @@ class EditUserMetadataDialogUnstyled extends React.Component {
       comment: '',
       references: [],
       entry_coauthors: [],
-      shared_with: [],
+      reviewers: [],
       datasets: []
     }
     this.unmounted = false
@@ -811,7 +811,7 @@ class EditUserMetadataDialogUnstyled extends React.Component {
       entry_coauthors: (example.authors || [])
         .filter(user => user.user_id !== example.uploader.user_id)
         .map(user => user.user_id),
-      shared_with: (example.owners || [])
+      reviewers: (example.owners || [])
         .filter(user => user.user_id !== example.uploader.user_id)
         .map(user => user.user_id),
       datasets: (example.datasets || []).map(ds => ds.dataset_name)
@@ -1059,11 +1059,11 @@ class EditUserMetadataDialogUnstyled extends React.Component {
            label="Co-author"
          />
        </UserMetadataField>
-       <UserMetadataField {...metadataFieldProps('shared_with', true)}>
+       <UserMetadataField {...metadataFieldProps('reviewers', true)}>
          <ListTextInput
            component={UserInput}
-           {...listTextInputProps('shared_with', true)}
-           label="Shared with"
+           {...listTextInputProps('reviewers', true)}
+           label="Reviewers"
          />
        </UserMetadataField>
        <UserMetadataField {...metadataFieldProps('datasets', true)}>
diff --git a/nomad/app/optimade/elasticsearch.py b/nomad/app/optimade/elasticsearch.py
index f4ce421217be92fb63a3e4031a3ef9d6a4e7d899..b25f9d1040842a39af4a640ca4fdde9a3344d02c 100644
--- a/nomad/app/optimade/elasticsearch.py
+++ b/nomad/app/optimade/elasticsearch.py
@@ -209,7 +209,7 @@ class StructureCollection(EntryCollection):
         attrs = archive.metadata.optimade.m_to_dict()

         attrs['immutable_id'] = entry_id
-        attrs['last_modified'] = archive.metadata.last_processing_time if archive.metadata.last_processing_time is not None else archive.metadata.upload_create_time
+        attrs['last_modified'] = archive.metadata.upload_create_time

         # TODO this should be removed, once all data is reprocessed with the right normalization
         attrs['chemical_formula_reduced'] = optimade_chemical_formula_reduced(
diff --git a/nomad/cli/client/integrationtests.py b/nomad/cli/client/integrationtests.py
index a8a2b49e0cc7e18d4125efb956589a72501fad75..a62dfcf2c5e3e92b72e7404be17f843a7a0beffa 100644
--- a/nomad/cli/client/integrationtests.py
+++ b/nomad/cli/client/integrationtests.py
@@ -177,7 +177,6 @@ def integrationtests(auth: api.Auth, skip_parsers: bool, skip_publish: bool, ski
         'comment': {'value': 'Test comment'},
         'references': [{'value': 'http;//test_reference.com'}],
         'entry_coauthors': [{'value': user['user_id']}],
-        'shared_with': [{'value': user['user_id']}],
         'datasets': [{'value': dataset}]}

     response = api.post(
diff --git a/nomad/cli/dev.py b/nomad/cli/dev.py
index 05984ae92698bbb3e2dec0d83022a4c6be476e8b..34e965e596d923d36167a6ba67f429d3828560c9 100644
--- a/nomad/cli/dev.py
+++ b/nomad/cli/dev.py
@@ -305,9 +305,11 @@ def example_data(username: str):
     data = ExampleData(uploader=user)

     # one upload with two calc published with embargo, one shared
+    upload_id = utils.create_uuid()
+    data.create_upload(upload_id=upload_id, published=True, embargo_length=0)
     data.create_entry(
         calc_id=utils.create_uuid(),
-        upload_id=utils.create_uuid(),
+        upload_id=upload_id,
         mainfile='test_content/test_embargo_entry/mainfile.json')

     data.save(with_files=True, with_es=True, with_mongo=True)
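The `nomad/cli/dev.py` hunk above illustrates the new `ExampleData` contract introduced further down in `tests/utils.py`: an upload must be created before its entries, and publish state and embargo now live on the upload rather than on each entry. A minimal usage sketch, assuming a NOMAD development checkout where the `tests` package is importable and `user` is a `datamodel.User`:

    from nomad import utils
    from tests.utils import ExampleData

    def make_published_upload(user):
        data = ExampleData(uploader=user)
        # Upload-level metadata (publish state, embargo, reviewers) is set once here...
        upload_id = utils.create_uuid()
        data.create_upload(upload_id=upload_id, published=True, embargo_length=0)
        # ...and every entry of the upload inherits it.
        data.create_entry(
            calc_id=utils.create_uuid(),
            upload_id=upload_id,
            mainfile='test_content/test_embargo_entry/mainfile.json')
        # Persist to upload files, Elasticsearch, and MongoDB.
        data.save(with_files=True, with_es=True, with_mongo=True)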
diff --git a/nomad/datamodel/datamodel.py b/nomad/datamodel/datamodel.py
index d087683175a2ff32d92192eab96fc64b54f3aefd..771de0a0b370edf2ee79b1a1694a464ca6ebee1d 100644
--- a/nomad/datamodel/datamodel.py
+++ b/nomad/datamodel/datamodel.py
@@ -370,14 +370,14 @@ class EntryMetadata(metainfo.MSection):
         comment: An arbitrary string with user provided information about the entry.
         references: A list of URLs for resources that are related to the entry.
         uploader: Id of the uploader of this entry.
+        reviewers: Ids of users who can review the upload which this entry belongs to. Like the
+            uploader, reviewers can find, see, and download all data from the upload, even
+            if it is in staging or has an embargo.
         entry_coauthors: Ids of all co-authors (excl. the uploader) specified on the
             entry level, rather than on the upload level. They are shown as authors of
             this entry alongside its uploader.
-        shared_with: Ids of all users that this entry is shared with. These users can find,
-            see, and download all data for this entry, even if it is in staging or
-            has an embargo.
         with_embargo: Entries with embargo are only visible to the uploader, the admin
-            user, and users the entry is shared with (see shared_with).
+            user, and users registered as reviewers of the upload (see reviewers).
         upload_create_time: The time that the upload was created
         entry_create_time: The time that the entry was created
         publish_time: The time when the upload was published
@@ -518,14 +518,17 @@ class EntryMetadata(metainfo.MSection):
         derived=derive_authors,
         a_elasticsearch=Elasticsearch(material_entry_type, metrics=dict(n_authors='cardinality')))

-    shared_with = metainfo.Quantity(
-        type=user_reference, shape=['0..*'], default=[], categories=[MongoEntryMetadata, EditableUserMetadata],
-        description='A user provided list of userts to share the entry with')
+    reviewers = metainfo.Quantity(
+        type=user_reference, shape=['0..*'], default=[], categories=[MongoUploadMetadata, EditableUserMetadata],
+        description='''
+        A user provided list of reviewers. Reviewers can see the whole upload, even if
+        it is unpublished or embargoed.
+        ''')

     owners = metainfo.Quantity(
         type=user_reference, shape=['0..*'],
         description='All owner (uploader and shared with users)',
-        derived=lambda entry: ([entry.uploader] if entry.uploader is not None else []) + entry.shared_with,
+        derived=lambda entry: ([entry.uploader] if entry.uploader is not None else []) + entry.reviewers,
         a_elasticsearch=Elasticsearch(material_entry_type))

     license = metainfo.Quantity(
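The `owners` derivation keeps its shape and only swaps `shared_with` for `reviewers`; since `reviewers` is now categorized as `MongoUploadMetadata`, the value is stored once per upload and mirrored into its entries. A plain-Python sketch of the derived value, using a hypothetical `EntryStub` stand-in rather than the real metainfo section:

    from dataclasses import dataclass, field
    from typing import List, Optional

    @dataclass
    class EntryStub:
        # Hypothetical stand-in for EntryMetadata, with only the fields the derivation reads.
        uploader: Optional[str] = None
        reviewers: List[str] = field(default_factory=list)

    def derive_owners(entry: EntryStub) -> List[str]:
        # Mirrors the lambda above: the uploader (if any) followed by all reviewers.
        return ([entry.uploader] if entry.uploader is not None else []) + entry.reviewers

    assert derive_owners(EntryStub('uploader_id', ['reviewer_id'])) == ['uploader_id', 'reviewer_id']
    assert derive_owners(EntryStub(reviewers=['reviewer_id'])) == ['reviewer_id']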
diff --git a/nomad/processing/data.py b/nomad/processing/data.py
index 56378933e458cb88f40755e863b86cdd31e53660..2d98bbef2a3295d50da3faeb7d691c80fccb3ee0 100644
--- a/nomad/processing/data.py
+++ b/nomad/processing/data.py
@@ -181,7 +181,6 @@ class Calc(Proc):
     comment = StringField()
     references = ListField(StringField(), default=None)
     entry_coauthors = ListField(StringField(), default=None)
-    shared_with = ListField(StringField(), default=None)
     datasets = ListField(StringField(), default=None)

     meta: Any = {
@@ -790,13 +789,12 @@ class Upload(Proc):
    and processing state.

    Attributes:
-        upload_name: Optional user provided upload name.
-        upload_path: The fs path were the uploaded files was stored during upload.
-        temporary: True if the uploaded file should be removed after extraction.
-        upload_id: The upload id generated by the database or the uploaded NOMAD deployment.
+        upload_name: Optional user provided upload name.
        upload_create_time: Datetime of creation of the upload.
        user_id: The id of the user that created this upload.
+        reviewers: A user provided list of reviewers. Reviewers can see the whole upload,
+            even if it is unpublished or embargoed.
        publish_time: Datetime when the upload was initially published on this
            NOMAD deployment.
        last_update: Datetime of the last modifying process run
            (publish, processing, upload).
@@ -811,8 +809,9 @@ class Upload(Proc):

     upload_id = StringField(primary_key=True)
     upload_name = StringField(default=None)
-    user_id = StringField(required=True)
     upload_create_time = DateTimeField(required=True)
+    user_id = StringField(required=True)
+    reviewers = ListField(StringField(), default=None)
     last_update = DateTimeField()
     publish_time = DateTimeField()
     embargo_length = IntField(default=0, required=True)
@@ -1719,9 +1718,13 @@
         published = upload_dict.get('publish_time') is not None
         if published:
             assert bundle_info['entries'], 'Upload published but no entries in bundle_info.json'
+        # Check user references
+        check_user_ids([upload_dict['user_id']], 'Invalid user_id: {id}')
+        check_user_ids(upload_dict.get('reviewers', []), 'Invalid reviewers reference: {id}')
         # Define which keys we think okay to copy from the bundle
         upload_keys_to_copy = [
-            'upload_name', 'embargo_length', 'license', 'from_oasis', 'oasis_deployment_id']
+            'upload_name', 'user_id', 'reviewers', 'embargo_length', 'license',
+            'from_oasis', 'oasis_deployment_id']
         if settings.keep_original_timestamps:
             upload_keys_to_copy.extend(('upload_create_time', 'publish_time',))
         try:
@@ -1786,7 +1789,6 @@
             assert entry_dict['_id'] == generate_entry_id(self.upload_id, entry_dict['mainfile']), (
                 'Provided entry id does not match generated value')
             check_user_ids(entry_dict.get('entry_coauthors', []), 'Invalid entry_coauthor reference: {id}')
-            check_user_ids(entry_dict.get('shared_with', []), 'Invalid shared_with reference: {id}')

             # Instantiate an entry object from the json, and validate it
             entry_keys_to_copy = list(_mongo_entry_metadata)
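The bundle import now validates the upload's `user_id` and `reviewers` with the same `check_user_ids` helper already used for `entry_coauthors`. Its implementation is not part of this diff; a plausible sketch of what it does, assuming `datamodel.User.get` returns `None` for unknown ids:

    from typing import Iterable
    from nomad import datamodel

    def check_user_ids(user_ids: Iterable[str], error_message: str):
        # Fail for any id that does not resolve to a known user; the message
        # template carries the offending id, e.g. 'Invalid reviewers reference: {id}'.
        for user_id in user_ids:
            user = datamodel.User.get(user_id=user_id)
            assert user is not None, error_message.format(id=user_id)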
diff --git a/tests/app/flask/test_dcat.py b/tests/app/flask/test_dcat.py
index 708578a48388a5c070acf6e3f532bf6c9e9994a5..fca9723fc06723373d9b6a58d2f498053c38fe35 100644
--- a/tests/app/flask/test_dcat.py
+++ b/tests/app/flask/test_dcat.py
@@ -49,21 +49,19 @@ def data(test_user, other_test_user, elastic_infra):
     example_attrs = dict(
         entry_id='test-id',
         upload_id='upload-id',
-        upload_create_time=datetime.now(),
         last_processing_time=datetime.now(),
-        uploader=test_user,
         entry_coauthors=[other_test_user],
-        comment='this is a calculation comment',
-        published=True)
+        comment='this is a calculation comment')

-    data = ExampleData()
+    data = ExampleData(uploader=test_user)
+    data.create_upload(
+        upload_id='upload-id', upload_create_time=datetime(2000, 1, 1), published=True, embargo_length=0)
     archive = data.create_entry(**example_attrs)
     archive.m_create(Results).m_create(Material).chemical_formula_descriptive = 'H2O'

     for i in range(1, 11):
         example_attrs.update(
             entry_id='test-id-%d' % i,
-            upload_create_time=datetime(2000, 1, 1),
             last_processing_time=datetime(2020, 1, i))
         data.create_entry(**example_attrs)
diff --git a/tests/app/test_optimade.py b/tests/app/test_optimade.py
index f0a44da6231d39edcd8461979d007d55586032d9..de71e0907cd97486b4cf84dbc733601663a5fdee 100644
--- a/tests/app/test_optimade.py
+++ b/tests/app/test_optimade.py
@@ -39,10 +39,11 @@ def test_get_entry(published: Upload):
     assert 'optimade.chemical_formula_hill' in utils.flat(search_result)


-def test_no_optimade(mongo, elastic, raw_files, client):
-    example_data = ExampleData()
-    example_data.create_structure(1, 2, 1, [], 0)
-    example_data.create_structure(2, 2, 1, [], 0, optimade=False)
+def test_no_optimade(mongo, elastic, raw_files, client, test_user):
+    example_data = ExampleData(uploader=test_user)
+    example_data.create_upload(upload_id='test_upload', published=True, embargo_length=0)
+    example_data.create_structure('test_upload', 1, 2, 1, [], 0)
+    example_data.create_structure('test_upload', 2, 2, 1, [], 0, optimade=False)
     example_data.save()

     rv = client.get('/optimade/structures')
@@ -52,16 +53,18 @@


 @pytest.fixture(scope='module')
-def example_structures(elastic_infra, mongo_infra, raw_files_infra):
+def example_structures(elastic_infra, mongo_infra, raw_files_infra, test_user):
     clear_elastic(elastic_infra)
     mongo_infra.drop_database('test_db')

-    example_data = ExampleData()
-    example_data.create_structure(1, 2, 1, [], 0)
-    example_data.create_structure(2, 2, 1, ['C'], 0)
-    example_data.create_structure(3, 2, 1, [], 1)
-    example_data.create_structure(
-        4, 1, 1, [], 0, metadata=dict(upload_create_time='1978-04-08T10:10:00Z'))
+    example_data = ExampleData(uploader=test_user)
+    example_data.create_upload(
+        upload_id='test_upload', upload_create_time='1978-04-08T10:10:00Z',
+        published=True, embargo_length=0)
+    example_data.create_structure('test_upload', 1, 2, 1, [], 0)
+    example_data.create_structure('test_upload', 2, 2, 1, ['C'], 0)
+    example_data.create_structure('test_upload', 3, 2, 1, [], 1)
+    example_data.create_structure('test_upload', 4, 1, 1, [], 0, metadata=dict(comment='A comment'))
     example_data.save()

     yield
@@ -128,7 +131,7 @@
     ('LENGTH nelements = 1', -1),
     ('chemical_formula_anonymous starts with "A"', -1),
     ('elements HAS ONY "H", "O"', -1),
-    ('last_modified >= "2009-02-01T20:07:00Z"', 3),
+    ('last_modified >= "2009-02-01T20:07:00Z"', 0),
     ('species_at_sites HAS "C"', 1),
    ('_nmd_results_material_structural_type = "molecule / cluster"', 3),
    ('_nmd_results_material_chemical_formula_reduced = "H20"', 0)
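The `last_modified` expectation drops from 3 to 0 as a direct consequence of the `nomad/app/optimade/elasticsearch.py` change above: `last_modified` is now taken from `upload_create_time`, and all four example structures share the fixture's 1978 upload. A quick check of that arithmetic:

    from datetime import datetime, timezone

    upload_create_time = datetime(1978, 4, 8, 10, 10, tzinfo=timezone.utc)
    threshold = datetime(2009, 2, 1, 20, 7, tzinfo=timezone.utc)

    # All 4 structures now carry the upload's 1978 timestamp, so none pass the filter.
    n_matches = sum(1 for _ in range(4) if upload_create_time >= threshold)
    assert n_matches == 0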
diff --git a/tests/app/v1/conftest.py b/tests/app/v1/conftest.py
index ecb333b7f1e445b9e494cc6164b227828f50c834..b5a7961811c03520802225502fba388d983f73ee 100644
--- a/tests/app/v1/conftest.py
+++ b/tests/app/v1/conftest.py
@@ -60,16 +60,18 @@ def example_data(elastic_module, raw_files_module, mongo_module, test_user, othe
         upload_id='id_embargo',
         calc_id='id_embargo',
         material_id='id_embargo',
-        mainfile='test_content/test_embargo_entry/mainfile.json',
-        shared_with=[],
-        with_embargo=True)
+        mainfile='test_content/test_embargo_entry/mainfile.json')
+    data.create_upload(
+        upload_id='id_embargo_shared_upload',
+        upload_name='name_embargo_shared',
+        published=True,
+        reviewers=[other_test_user.user_id],
+        embargo_length=12)
     data.create_entry(
-        upload_id='id_embargo',
+        upload_id='id_embargo_shared_upload',
         calc_id='id_embargo_shared',
         material_id='id_embargo_shared',
-        mainfile='test_content/test_embargo_entry_shared/mainfile.json',
-        shared_with=[other_test_user],
-        with_embargo=True)
+        mainfile='test_content/test_embargo_entry_shared/mainfile.json')

     # one upload with two calc in staging, one shared
     data.create_upload(
@@ -79,18 +81,16 @@ def example_data(elastic_module, raw_files_module, mongo_module, test_user, othe
         upload_id='id_unpublished',
         calc_id='id_unpublished',
         material_id='id_unpublished',
-        mainfile='test_content/test_entry/mainfile.json',
-        with_embargo=False,
-        shared_with=[],
-        published=False)
+        mainfile='test_content/test_entry/mainfile.json')
+    data.create_upload(
+        upload_id='id_unpublished_shared_upload',
+        published=False,
+        reviewers=[other_test_user.user_id])
     data.create_entry(
-        upload_id='id_unpublished',
+        upload_id='id_unpublished_shared_upload',
         calc_id='id_unpublished_shared',
         material_id='id_unpublished_shared',
-        mainfile='test_content/test_entry_shared/mainfile.json',
-        shared_with=[other_test_user],
-        with_embargo=False,
-        published=False)
+        mainfile='test_content/test_entry_shared/mainfile.json')

     # one upload with 23 calcs published
     data.create_upload(
@@ -145,9 +145,7 @@ def example_data_writeable(mongo, test_user, normalized):
     data.create_entry(
         upload_id='id_published_w',
         calc_id='id_published_w_entry',
-        mainfile='test_content/test_embargo_entry/mainfile.json',
-        shared_with=[],
-        with_embargo=True)
+        mainfile='test_content/test_embargo_entry/mainfile.json')

     # one upload with one entry, unpublished
     data.create_upload(
@@ -157,10 +155,7 @@ def example_data_writeable(mongo, test_user, normalized):
     data.create_entry(
         upload_id='id_unpublished_w',
         calc_id='id_unpublished_w_entry',
-        mainfile='test_content/test_embargo_entry/mainfile.json',
-        shared_with=[],
-        with_embargo=True,
-        published=False)
+        mainfile='test_content/test_embargo_entry/mainfile.json')

     # one upload, no entries, still processing
     data.create_upload(
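The new `id_embargo_shared_upload` and `id_unpublished_shared_upload` fixtures express sharing at the upload level: `other_test_user` reviews the whole upload instead of individual entries. A hedged sketch of the visibility rule these fixtures exercise, with a hypothetical `visible_upload_ids` helper standing in for the actual API query:

    from types import SimpleNamespace

    def visible_upload_ids(user, uploads):
        # Hypothetical helper: a user sees an upload they created or review.
        return [
            u['upload_id'] for u in uploads
            if u['user_id'] == user.user_id or user.user_id in u.get('reviewers', [])]

    uploads = [
        {'upload_id': 'id_embargo', 'user_id': 'test_user'},
        {'upload_id': 'id_embargo_shared_upload', 'user_id': 'test_user',
         'reviewers': ['other_test_user']}]

    other = SimpleNamespace(user_id='other_test_user')
    assert visible_upload_ids(other, uploads) == ['id_embargo_shared_upload']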
diff --git a/tests/app/v1/routers/common.py b/tests/app/v1/routers/common.py
index b60f1019bb463bf744b103fa57ee7256ccc0d311..ba7a6a0224d9f9d0b256e56e879a8f35492bfe2e 100644
--- a/tests/app/v1/routers/common.py
+++ b/tests/app/v1/routers/common.py
@@ -180,7 +180,7 @@ def aggregation_test_parameters(entity_id: str, material_prefix: str, entry_pref
         ),
     pytest.param(
         {'terms': {'quantity': f'{entry_prefix}upload_id'}},
-        3, 3, 200, 'test_user', id='default'),
+        5, 5, 200, 'test_user', id='default'),
     pytest.param(
         {
             'terms': {
@@ -188,7 +188,7 @@ def aggregation_test_parameters(entity_id: str, material_prefix: str, entry_pref
                'pagination': {'order_by': f'{entry_prefix}uploader.user_id'}
            }
        },
-        3, 3, 200, 'test_user', id='order-str'),
+        5, 5, 200, 'test_user', id='order-str'),
     pytest.param(
         {
             'terms': {
@@ -196,7 +196,7 @@ def aggregation_test_parameters(entity_id: str, material_prefix: str, entry_pref
                'pagination': {'order_by': upload_create_time}
            }
        },
-        3, 3, 200, 'test_user', id='order-date'),
+        5, 5, 200, 'test_user', id='order-date'),
     pytest.param(
         {
             'terms': {
@@ -204,7 +204,7 @@ def aggregation_test_parameters(entity_id: str, material_prefix: str, entry_pref
                'pagination': {'order_by': f'{entry_prefix}results.properties.n_calculations'}
            }
        },
-        3, 3, 200, 'test_user', id='order-int'),
+        5, 5, 200, 'test_user', id='order-int'),
     pytest.param(
         {'terms': {'quantity': f'{material_prefix}symmetry.structure_name'}},
         0, 0, 200, 'test_user', id='no-results'),
     pytest.param(
         {
             'terms': {
@@ -215,7 +215,7 @@ def aggregation_test_parameters(entity_id: str, material_prefix: str, entry_pref
                'pagination': {'page_after_value': 'id_published'}
            }
        },
-        3, 1, 200, 'test_user', id='after'),
+        5, 2, 200, 'test_user', id='after'),
     pytest.param(
         {
             'terms': {
@@ -226,13 +226,13 @@ def aggregation_test_parameters(entity_id: str, material_prefix: str, entry_pref
            }
        }
        },
-        3, 1, 200, 'test_user', id='after-order'),
+        5, 2, 200, 'test_user', id='after-order'),
     pytest.param(
         {'terms': {'quantity': f'{entry_prefix}upload_id', 'entries': {'size': 10}}},
-        3, 3, 200, 'test_user', id='entries'),
+        5, 5, 200, 'test_user', id='entries'),
     pytest.param(
         {'terms': {'quantity': f'{entry_prefix}upload_id', 'entries': {'size': 1}}},
-        3, 3, 200, 'test_user', id='entries-size'),
+        5, 5, 200, 'test_user', id='entries-size'),
     pytest.param(
         {'terms': {'quantity': f'{entry_prefix}upload_id', 'entries': {'size': 0}}},
         -1, -1, 422, 'test_user', id='bad-entries'),
@@ -248,7 +248,7 @@ def aggregation_test_parameters(entity_id: str, material_prefix: str, entry_pref
                }
            }
        },
-        3, 3, 200, 'test_user', id='entries-include'),
+        5, 5, 200, 'test_user', id='entries-include'),
     pytest.param(
         {'terms': {'quantity': program_name}},
         n_code_names, n_code_names, 200, None, id='fixed-values'),
@@ -280,7 +280,7 @@ def aggregation_test_parameters(entity_id: str, material_prefix: str, entry_pref
                'pagination': {'order': 'asc'}
            }
        },
-        3, 3, 200, 'test_user', id='order-direction'),
+        5, 5, 200, 'test_user', id='order-direction'),
     pytest.param(
         {'terms': {'quantity': 'does not exist'}},
         -1, -1, 422, None, id='bad-quantity'),
diff --git a/tests/app/v1/routers/test_datasets.py b/tests/app/v1/routers/test_datasets.py
index efcf89f8ed9ee08ac3f301cc976e50964b91e152..078b4cf6c97612863a170f93d3b4ba1ee581c6a2 100644
--- a/tests/app/v1/routers/test_datasets.py
+++ b/tests/app/v1/routers/test_datasets.py
@@ -51,6 +51,7 @@ def data(elastic, raw_files, mongo, test_user, other_test_user):
         return dataset

     data = ExampleData(uploader=test_user)
+    data.create_upload(upload_id='upload_1', published=True)
     data.create_entry(
         upload_id='upload_1',
         calc_id='entry_1',
@@ -85,7 +86,7 @@ def data(elastic, raw_files, mongo, test_user, other_test_user):
             dataset_type='foreign', doi='test_doi')
     ])
-
+    data.create_upload(upload_id='other_data', published=True)
     for i in range(1, 4):
         data.create_entry(
             upload_id='other_data',
diff --git a/tests/app/v1/routers/test_entries.py b/tests/app/v1/routers/test_entries.py
index 7d9555b75ede1e3f46c93641658b6e4e5ce69740..5299a018f4b548fc27145134ca795a2e0d97ea94 100644
--- a/tests/app/v1/routers/test_entries.py
+++ b/tests/app/v1/routers/test_entries.py
@@ -339,7 +339,7 @@ def test_entries_all_metrics(client, data):
                }
            }
        },
-        3, 3, 200, 'test_user', id='entries-exclude'),
+        5, 5, 200, 'test_user', id='entries-exclude'),
     pytest.param(
         {'terms': {'quantity': 'entry_id', 'value_filter': '_0'}},
         9, 9, 200, None, id='filter'),
@@ -489,16 +489,14 @@ def example_data_with_compressed_files(elastic_module, raw_files_module, mongo_m
     data.create_entry(
         upload_id='with_compr_published',
         calc_id='with_compr_published',
-        mainfile='test_content/test_entry/mainfile.json',
-        shared_with=[])
+        mainfile='test_content/test_entry/mainfile.json')
     data.create_upload(
         upload_id='with_compr_unpublished',
         published=False)
     data.create_entry(
         upload_id='with_compr_unpublished',
         calc_id='with_compr_unpublished',
-        mainfile='test_content/test_entry/mainfile.json',
-        shared_with=[])
+        mainfile='test_content/test_entry/mainfile.json')

     data.save()
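The upload-id aggregation counts in `common.py` and `test_entries.py` move from 3 to 5 because the reworked `example_data` fixture (see `tests/app/v1/conftest.py` above) gives `test_user` two additional shared uploads with entries. Enumerated for reference:

    # Uploads with entries visible to test_user in the example_data fixture:
    visible_to_test_user = [
        'id_embargo',
        'id_embargo_shared_upload',      # new: shared via upload-level reviewers
        'id_unpublished',
        'id_unpublished_shared_upload',  # new: shared via upload-level reviewers
        'id_published',
    ]
    assert len(visible_to_test_user) == 5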
diff --git a/tests/app/v1/routers/test_entries_edit.py b/tests/app/v1/routers/test_entries_edit.py
index 1530fbfe06b3d5e0a05bd4a9c20f55e01cbf72b9..998e5722329c8119975fbbbdeabc40ccbfe59273 100644
--- a/tests/app/v1/routers/test_entries_edit.py
+++ b/tests/app/v1/routers/test_entries_edit.py
@@ -55,16 +55,12 @@ class TestEditRepo():

         example_data = ExampleData()
         example_data.create_upload('upload_1', user_id=test_user.user_id, published=True, embargo_length=0)
-        example_data.create_entry(
-            upload_id='upload_1', uploader=test_user, published=True, with_embargo=False)
+        example_data.create_entry(upload_id='upload_1')
         example_data.create_upload('upload_2', user_id=test_user.user_id, published=True, embargo_length=36)
-        example_data.create_entry(
-            upload_id='upload_2', uploader=test_user, published=True, with_embargo=True)
-        example_data.create_entry(
-            upload_id='upload_2', uploader=test_user, published=True, with_embargo=True)
+        example_data.create_entry(upload_id='upload_2')
+        example_data.create_entry(upload_id='upload_2')
         example_data.create_upload('upload_3', user_id=other_test_user.user_id, published=True, embargo_length=0)
-        example_data.create_entry(
-            upload_id='upload_3', uploader=other_test_user, published=True, with_embargo=False)
+        example_data.create_entry(upload_id='upload_3')

         example_data.save()
@@ -143,8 +139,8 @@ class TestEditRepo():
         edit_data = dict(
             comment='test_edit_props',
             references=['http://test', 'http://test2'],
-            entry_coauthors=[other_test_user.user_id],
-            shared_with=[other_test_user.user_id])
+            # reviewers=[other_test_user.user_id],  # TODO: needs to be set on upload level
+            entry_coauthors=[other_test_user.user_id])
         rv = self.perform_edit(**edit_data, query=self.query('upload_1'))
         result = rv.json()
         assert rv.status_code == 200, result
@@ -160,18 +156,17 @@ class TestEditRepo():
         assert self.mongo(1, comment='test_edit_props')
         assert self.mongo(1, references=['http://test', 'http://test2'])
         assert self.mongo(1, entry_coauthors=[other_test_user.user_id])
-        assert self.mongo(1, shared_with=[other_test_user.user_id])
+        # assert self.mongo(1, reviewers=[other_test_user.user_id])  # TODO: needs to be set on upload level

         self.assert_elastic(1, comment='test_edit_props')
         self.assert_elastic(1, references=['http://test', 'http://test2'])
         self.assert_elastic(1, authors=[test_user.user_id, other_test_user.user_id])
-        self.assert_elastic(1, owners=[test_user.user_id, other_test_user.user_id])
+        # self.assert_elastic(1, owners=[test_user.user_id, other_test_user.user_id])

         edit_data = dict(
             comment='',
             references=[],
-            entry_coauthors=[],
-            shared_with=[])
+            entry_coauthors=[])
         rv = self.perform_edit(**edit_data, query=self.query('upload_1'))
         result = rv.json()
         assert rv.status_code == 200
@@ -187,7 +182,7 @@ class TestEditRepo():
         assert self.mongo(1, comment=None)
         assert self.mongo(1, references=[])
         assert self.mongo(1, entry_coauthors=[])
-        assert self.mongo(1, shared_with=[])
+        assert self.mongo(1, reviewers=None)

         self.assert_elastic(1, comment=None)
         self.assert_elastic(1, references=[])
diff --git a/tests/app/v1/routers/test_uploads.py b/tests/app/v1/routers/test_uploads.py
index 6f6717cb24d12fdbfa9bda51a35d831d99a451ac..b884bb846bc509ad2797fd3905b66220fce41088 100644
--- a/tests/app/v1/routers/test_uploads.py
+++ b/tests/app/v1/routers/test_uploads.py
@@ -323,9 +323,11 @@ def get_upload_entries_metadata(entries: List[Dict[str, Any]]) -> Iterable[Entry
 @pytest.mark.parametrize('kwargs', [
     pytest.param(
         dict(
-            expected_upload_ids=['id_embargo', 'id_unpublished', 'id_published', 'id_processing', 'id_empty'],
+            expected_upload_ids=[
+                'id_embargo', 'id_embargo_shared_upload', 'id_unpublished', 'id_unpublished_shared_upload',
+                'id_published', 'id_processing', 'id_empty'],
             expected_pagination={
-                'total': 5, 'page': 1, 'page_after_value': None, 'next_page_after_value': None,
+                'total': 7, 'page': 1, 'page_after_value': None, 'next_page_after_value': None,
                 'page_url': Any, 'next_page_url': None, 'prev_page_url': None, 'first_page_url': Any}
         ), id='no-args'),
     pytest.param(
@@ -351,17 +353,19 @@ def get_upload_entries_metadata(entries: List[Dict[str, Any]]) -> Iterable[Entry
     pytest.param(
         dict(
             query_params={'is_processing': False},
-            expected_upload_ids=['id_embargo', 'id_unpublished', 'id_published', 'id_empty'],
+            expected_upload_ids=[
+                'id_embargo', 'id_embargo_shared_upload', 'id_unpublished', 'id_unpublished_shared_upload',
+                'id_published', 'id_empty'],
         ), id='filter-is_processing-False'),
     pytest.param(
         dict(
             query_params={'is_published': True},
-            expected_upload_ids=['id_embargo', 'id_published'],
+            expected_upload_ids=['id_embargo', 'id_embargo_shared_upload', 'id_published'],
         ), id='filter-is_published-True'),
     pytest.param(
         dict(
             query_params={'is_published': False},
-            expected_upload_ids=['id_unpublished', 'id_processing', 'id_empty'],
+            expected_upload_ids=['id_unpublished', 'id_unpublished_shared_upload', 'id_processing', 'id_empty'],
         ), id='filter-is_published-False'),
     pytest.param(
         dict(
@@ -386,30 +390,30 @@ def get_upload_entries_metadata(entries: List[Dict[str, Any]]) -> Iterable[Entry
     pytest.param(
         dict(
             query_params={'page_size': 2},
-            expected_upload_ids=['id_embargo', 'id_unpublished'],
+            expected_upload_ids=['id_embargo', 'id_embargo_shared_upload'],
             expected_pagination={
-                'total': 5, 'page': 1, 'page_after_value': None, 'next_page_after_value': '1',
+                'total': 7, 'page': 1, 'page_after_value': None, 'next_page_after_value': '1',
                 'page_url': Any, 'next_page_url': Any, 'prev_page_url': None, 'first_page_url': Any}
         ), id='pag-page-1'),
     pytest.param(
         dict(
             query_params={'page_size': 2, 'page': 2},
-            expected_upload_ids=['id_published', 'id_processing'],
+            expected_upload_ids=['id_unpublished', 'id_unpublished_shared_upload'],
             expected_pagination={
-                'total': 5, 'page': 2, 'page_after_value': '1', 'next_page_after_value': '3',
+                'total': 7, 'page': 2, 'page_after_value': '1', 'next_page_after_value': '3',
                 'page_url': Any, 'next_page_url': Any, 'prev_page_url': Any, 'first_page_url': Any}
         ), id='pag-page-2'),
     pytest.param(
         dict(
-            query_params={'page_size': 2, 'page': 3},
+            query_params={'page_size': 3, 'page': 3},
             expected_upload_ids=['id_empty'],
             expected_pagination={
-                'total': 5, 'page': 3, 'page_after_value': '3', 'next_page_after_value': None,
+                'total': 7, 'page': 3, 'page_after_value': '5', 'next_page_after_value': None,
                 'page_url': Any, 'next_page_url': None, 'prev_page_url': Any, 'first_page_url': Any}
         ), id='pag-page-3'),
     pytest.param(
         dict(
-            query_params={'page_size': 2, 'page': 4},
+            query_params={'page_size': 2, 'page': 5},
             expected_status_code=400
         ), id='pag-page-out-of-range'),
     pytest.param(
@@ -417,7 +421,7 @@ def get_upload_entries_metadata(entries: List[Dict[str, Any]]) -> Iterable[Entry
             query_params={'page_size': 2, 'order': 'desc'},
             expected_upload_ids=['id_empty', 'id_processing'],
             expected_pagination={
-                'total': 5, 'page': 1, 'page_after_value': None, 'next_page_after_value': '1',
+                'total': 7, 'page': 1, 'page_after_value': None, 'next_page_after_value': '1',
                 'page_url': Any, 'next_page_url': Any, 'prev_page_url': None, 'first_page_url': Any}
         ), id='pag-page-order-desc'),
     pytest.param(
@@ -474,10 +478,10 @@ def test_get_upload(
 @pytest.mark.parametrize('kwargs', [
     pytest.param(
         dict(
-            expected_data_len=2,
-            expected_response={'processing_successful': 2, 'processing_failed': 0},
+            expected_data_len=1,
+            expected_response={'processing_successful': 1, 'processing_failed': 0},
             expected_pagination={
-                'total': 2, 'page': 1, 'page_after_value': None, 'next_page_after_value': None,
+                'total': 1, 'page': 1, 'page_after_value': None, 'next_page_after_value': None,
                 'page_url': Any, 'next_page_url': None, 'prev_page_url': None, 'first_page_url': Any}),
         id='no-args'),
     pytest.param(
@@ -498,7 +502,7 @@ def test_get_upload(
     pytest.param(
         dict(
             user='admin_user',
-            expected_data_len=2),
+            expected_data_len=1),
         id='admin-access'),
     pytest.param(
         dict(
@@ -507,47 +511,52 @@ def test_get_upload(
         id='invalid-upload_id'),
     pytest.param(
         dict(
-            query_args={'page_size': 1},
-            expected_data_len=1,
-            expected_response={'processing_successful': 2, 'processing_failed': 0},
+            upload_id='id_published',
+            query_args={'page_size': 5},
+            expected_data_len=5,
+            expected_response={'processing_successful': 23, 'processing_failed': 0},
             expected_pagination={
-                'total': 2, 'page': 1, 'page_after_value': None, 'next_page_after_value': '0', 'order_by': 'mainfile',
+                'total': 23, 'page': 1, 'page_after_value': None, 'next_page_after_value': '4', 'order_by': 'mainfile',
                 'page_url': Any, 'next_page_url': Any, 'prev_page_url': None, 'first_page_url': Any}),
         id='pag-page-1'),
     pytest.param(
         dict(
-            query_args={'page_size': 1, 'page': 1},
-            expected_data_len=1,
-            expected_response={'processing_successful': 2, 'processing_failed': 0},
+            upload_id='id_published',
+            query_args={'page_size': 5, 'page': 1},
+            expected_data_len=5,
+            expected_response={'processing_successful': 23, 'processing_failed': 0},
             expected_pagination={
-                'total': 2, 'page': 1, 'page_after_value': None, 'next_page_after_value': '0', 'order_by': 'mainfile',
+                'total': 23, 'page': 1, 'page_after_value': None, 'next_page_after_value': '4', 'order_by': 'mainfile',
                 'page_url': Any, 'next_page_url': Any, 'prev_page_url': None, 'first_page_url': Any}),
         id='pag-page-1-by-page'),
     pytest.param(
         dict(
-            query_args={'page_size': 1, 'page': 2},
-            expected_data_len=1,
-            expected_response={'processing_successful': 2, 'processing_failed': 0},
+            upload_id='id_published',
+            query_args={'page_size': 10, 'page': 3},
+            expected_data_len=3,
+            expected_response={'processing_successful': 23, 'processing_failed': 0},
             expected_pagination={
-                'total': 2, 'page': 2, 'page_after_value': '0', 'next_page_after_value': None, 'order_by': 'mainfile',
+                'total': 23, 'page': 3, 'page_after_value': '19', 'next_page_after_value': None, 'order_by': 'mainfile',
                 'page_url': Any, 'next_page_url': None, 'prev_page_url': Any, 'first_page_url': Any}),
-        id='pag-page-2-by-page'),
+        id='pag-page-3-by-page'),
     pytest.param(
         dict(
-            query_args={'page_size': 1, 'page_after_value': '0'},
-            expected_data_len=1,
-            expected_response={'processing_successful': 2, 'processing_failed': 0},
+            upload_id='id_published',
+            query_args={'page_size': 10, 'page_after_value': '19'},
+            expected_data_len=3,
+            expected_response={'processing_successful': 23, 'processing_failed': 0},
             expected_pagination={
-                'total': 2, 'page': 2, 'page_after_value': '0', 'next_page_after_value': None, 'order_by': 'mainfile',
+                'total': 23, 'page': 3, 'page_after_value': '19', 'next_page_after_value': None, 'order_by': 'mainfile',
                 'page_url': Any, 'next_page_url': None, 'prev_page_url': Any, 'first_page_url': Any}),
-        id='pag-page-2-by-page_after_value'),
+        id='pag-page-3-by-page_after_value'),
     pytest.param(
         dict(
+            upload_id='id_published',
             query_args={'page_size': 0},
             expected_data_len=0,
-            expected_response={'processing_successful': 2, 'processing_failed': 0},
+            expected_response={'processing_successful': 23, 'processing_failed': 0},
             expected_pagination={
-                'total': 2, 'page': 1, 'page_after_value': None, 'next_page_after_value': None, 'order_by': 'mainfile',
+                'total': 23, 'page': 1, 'page_after_value': None, 'next_page_after_value': None, 'order_by': 'mainfile',
                 'page_url': Any, 'next_page_url': None, 'prev_page_url': None, 'first_page_url': None}),
         id='pag-page_size-zero'),
     pytest.param(
@@ -562,11 +571,12 @@ def test_get_upload(
         id='pag-out-of-rage-page_after_value'),
     pytest.param(
         dict(
+            upload_id='id_published',
             query_args={'page_size': 1, 'order_by': 'parser_name'},
             expected_data_len=1,
-            expected_response={'processing_successful': 2, 'processing_failed': 0},
+            expected_response={'processing_successful': 23, 'processing_failed': 0},
             expected_pagination={
-                'total': 2, 'page': 1, 'page_after_value': None, 'next_page_after_value': '0', 'order_by': 'parser_name',
+                'total': 23, 'page': 1, 'page_after_value': None, 'next_page_after_value': '0', 'order_by': 'parser_name',
                 'page_url': Any, 'next_page_url': Any, 'prev_page_url': None, 'first_page_url': Any}),
         id='pag-order_by-parser_name'),
     pytest.param(
@@ -590,7 +600,7 @@ def test_get_upload_entries(
     user = kwargs.get('user', 'test_user')
     query_args = kwargs.get('query_args', {})
     expected_status_code = kwargs.get('expected_status_code', 200)
-    expected_data_len = kwargs.get('expected_data_len', 2)
+    expected_data_len = kwargs.get('expected_data_len', 1)
     expected_response = kwargs.get('expected_response', {})
     expected_pagination = kwargs.get('expected_pagination', {})
     user_auth, __token = test_auth_dict[user]
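The pagination expectations above follow from the new totals: 7 uploads in the upload listing and 23 entries of `id_published` in the entries listing. A quick sanity check of the page math used in the parameters:

    def pages(total, page_size):
        # Number of pages and size of the last page for simple offset pagination.
        n_full, remainder = divmod(total, page_size)
        return n_full + (1 if remainder else 0), remainder or page_size

    assert pages(7, 2) == (4, 1)    # page 5 is out of range for the upload listing
    assert pages(7, 3) == (3, 1)    # page 3 with page_size 3 holds only 'id_empty'
    assert pages(23, 10) == (3, 3)  # page 3 of id_published's entries holds 3 items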
diff --git a/tests/test_datamodel.py b/tests/test_datamodel.py
index bfb7094027861c06e7bd8cb8480d917365440d52..c5c18ed6833041523f9c20893d2ffe0795cc81fa 100644
--- a/tests/test_datamodel.py
+++ b/tests/test_datamodel.py
@@ -67,7 +67,9 @@ def _gen_ref():
     return random.choice(references)


-def generate_calc(pid: int = 0, calc_id: str = None, upload_id: str = None, with_embargo=None) -> datamodel.EntryMetadata:
+def generate_calc(
+        pid: int = 0, calc_id: str = None, upload_id: str = None, with_embargo=None,
+        reviewers=[]) -> datamodel.EntryMetadata:
     random.seed(pid)

     entry = datamodel.EntryMetadata()
@@ -84,7 +86,7 @@ def generate_calc(pid: int = 0, calc_id: str = None, upload_id: str = None, with
     entry.with_embargo = with_embargo if with_embargo is not None else random.choice([True, False])
     entry.published = True
     entry.entry_coauthors = list(_gen_user() for _ in range(0, random.choice(low_numbers_for_refs_and_datasets)))
-    entry.shared_with = list(_gen_user() for _ in range(0, random.choice(low_numbers_for_refs_and_datasets)))
+    entry.reviewers = reviewers
     entry.comment = random.choice(comments)
     entry.references = list(_gen_ref() for _ in range(0, random.choice(low_numbers_for_refs_and_datasets)))
     entry.datasets = list(
diff --git a/tests/test_search.py b/tests/test_search.py
index 4c0921752ccb4672f3795195838af5bd4190d84d..4d90342c69b9e679e87987ca8b6542efe4b471b1 100644
--- a/tests/test_search.py
+++ b/tests/test_search.py
@@ -116,14 +116,12 @@ def test_mapping_compatibility(elastic_infra):

 @pytest.fixture()
 def example_data(elastic, test_user, other_test_user):
     data = ExampleData(uploader=test_user)
-
+    data.create_upload(upload_id='test_upload_id', published=True, embargo_length=12)
     for i in range(0, 4):
         data.create_entry(
             upload_id='test_upload_id',
             calc_id=f'test_entry_id_{i}',
-            mainfile='test_content/test_embargo_entry/mainfile.json',
-            shared_with=[],
-            with_embargo=True)
+            mainfile='test_content/test_embargo_entry/mainfile.json')

     data.save(with_files=False, with_mongo=False)
diff --git a/tests/utils.py b/tests/utils.py
index 2b39a97194c2f7b39d1e683788ac6b4c287c7406..6072dd33843cac48b2253cafe2f7b05440d16a9b 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -137,7 +137,6 @@ class ExampleData:
         self.entry_defaults = kwargs

         self._entry_id_counter = 1
-        self._upload_id_counter = 1

         self._time_stamp = datetime.utcnow()

@@ -145,38 +144,6 @@ class ExampleData:
         from tests.test_files import create_test_upload_files
         from nomad import processing as proc

-        # Consistency checks
-        uploads_published: Dict[str, bool] = {}
-        uploads_embargo_length: Dict[str, int] = {}
-        for upload_id in set(list(self.uploads) + list(self.upload_entries)):
-            entry_ids = self.upload_entries.get(upload_id, [])
-            embargo_length = self.uploads.get(upload_id, {}).get('embargo_length')
-            # Check entries must have consistent published and with_embargo
-            entry_published_values = set()
-            entry_with_embargo_values = set()
-            for entry_id in entry_ids:
-                entry_published_values.add(self.entries[entry_id].published)
-                entry_with_embargo_values.add(self.entries[entry_id].with_embargo)
-            # Check/default published and with_embargo
-            if len(entry_ids) > 0:
-                assert len(entry_published_values) == 1, 'Inconsistent published flags'
-                assert len(entry_with_embargo_values) == 1, 'Inconsistent embargo flags'
-                published = entry_published_values.pop()
-                with_embargo = entry_with_embargo_values.pop()
-                if upload_id in self.uploads:
-                    assert embargo_length is not None, 'No embargo provided on upload'
-                    assert (embargo_length > 0) == with_embargo, 'Inconsistent embargo'
-                    assert published == (self.uploads[upload_id]['publish_time'] is not None)
-                else:
-                    # No uploads created. Just generate it
-                    embargo_length = 36 if with_embargo else 0
-            else:
-                published = False
-                if embargo_length is None:
-                    embargo_length = 0
-            uploads_published[upload_id] = published
-            uploads_embargo_length[upload_id] = embargo_length
-
         # Save
         if with_mongo:
             for upload_id, upload_dict in self.uploads.items():
@@ -193,10 +160,6 @@ class ExampleData:
                     mainfile=entry_metadata.mainfile,
                     parser_name='parsers/vasp',
                     process_status=process_status)
-                upload_dict = self.uploads.get(entry_metadata.upload_id)
-                if upload_dict:
-                    # Mirror fields from upload
-                    entry_metadata.uploader = upload_dict['user_id']
                 mongo_entry.set_mongo_entry_metadata(entry_metadata)
                 mongo_entry.save()
@@ -205,7 +168,7 @@ class ExampleData:
             search.index(archives, update_materials=True, refresh=True)

         if with_files:
-            for upload_id in set(list(self.uploads) + list(self.upload_entries)):
+            for upload_id, upload_dict in self.uploads.items():
                 entry_ids = self.upload_entries.get(upload_id, [])
                 archives = []
                 for entry_id in entry_ids:
@@ -213,8 +176,8 @@ class ExampleData:
                     archives.append(self.archives[entry_id])

                 create_test_upload_files(
-                    upload_id, archives, published=uploads_published[upload_id],
-                    embargo_length=uploads_embargo_length[upload_id])
+                    upload_id, archives, published=upload_dict.get('publish_time') is not None,
+                    embargo_length=upload_dict['embargo_length'])
                 from nomad import files
                 assert files.UploadFiles.get(upload_id) is not None
@@ -246,13 +209,14 @@ class ExampleData:
             'last_update': self._next_time_stamp(),
             'embargo_length': 0,
             'publish_time': None,
+            'license': 'CC BY 4.0',
             'published_to': []}
         upload_dict.update(kwargs)
         if published is not None:
             if published and not upload_dict['publish_time']:
                 upload_dict['publish_time'] = self._next_time_stamp()
             elif not published:
-                assert not upload_dict['publish_time']
+                assert not upload_dict.get('publish_time')
         if 'user_id' not in upload_dict and 'uploader' in self.entry_defaults:
             upload_dict['user_id'] = self.entry_defaults['uploader'].user_id
         self.uploads[upload_id] = upload_dict
@@ -266,6 +230,9 @@ class ExampleData:
             results: Union[Results, dict] = None,
             archive: dict = None, **kwargs) -> EntryArchive:
+        assert upload_id in self.uploads, 'Must create the upload first'
+        upload_dict = self.uploads[upload_id]
+
         if entry_id is None:
             entry_id = calc_id
@@ -276,10 +243,6 @@ class ExampleData:
         if mainfile is None:
             mainfile = f'mainfile_for_{entry_id}'

-        if upload_id is None:
-            upload_id = f'test_upload_id_{self._upload_id_counter}'
-            self._upload_id_counter += 1
-
         if entry_archive is None:
             entry_archive = EntryArchive()
@@ -290,25 +253,26 @@ class ExampleData:
         if entry_metadata is None:
             entry_metadata = entry_archive.m_create(EntryMetadata)

-        upload_create_time = None
-        if upload_id in self.uploads:
-            upload_create_time = self.uploads[upload_id].get('upload_create_time')
-        if upload_create_time is None:
-            upload_create_time = self._next_time_stamp()
-
         entry_metadata.m_update(
             calc_id=entry_id,
             upload_id=upload_id,
             mainfile=mainfile,
             calc_hash='dummy_hash_' + entry_id,
             domain='dft',
-            upload_create_time=upload_create_time,
             entry_create_time=self._next_time_stamp(),
             processed=True,
-            published=bool(self.uploads.get(upload_id, {}).get('publish_time', True)),
-            with_embargo=self.uploads.get(upload_id, {}).get('embargo_length', 0) > 0,
             parser_name='parsers/vasp')
         entry_metadata.m_update(**self.entry_defaults)
+        # Fetch data from Upload
+        upload_keys = ['upload_name', 'user_id', 'reviewers', 'upload_create_time', 'license', 'publish_time']
+        upload_values = {k: upload_dict[k] for k in upload_keys if k in upload_dict}
+        upload_values['with_embargo'] = upload_dict['embargo_length'] > 0
+        upload_values['published'] = upload_dict.get('publish_time') is not None
+        if 'user_id' in upload_values:
+            upload_values['uploader'] = upload_values.pop('user_id')
+        for k in upload_keys + ['with_embargo', 'published']:
+            assert k not in kwargs, f'Upload level metadata specified on entry level: {k}'
+        entry_metadata.m_update(**upload_values)
         entry_metadata.m_update(**kwargs)

         # create v1 default data
@@ -356,11 +320,6 @@ class ExampleData:
             if entry_archive.results.material.material_id is None:
                 entry_archive.results.material.material_id = material_id

-        if upload_id in self.uploads:
-            # Check embargo consistency
-            with_embargo = (self.uploads[upload_id]['embargo_length'] > 0)
-            assert entry_metadata.with_embargo == with_embargo, 'Inconsistent embargo flags'
-
         self.archives[entry_id] = entry_archive
         self.entries[entry_id] = entry_metadata
         self.upload_entries.setdefault(entry_metadata.upload_id, []).append(entry_id)
@@ -377,7 +336,7 @@ class ExampleData:
     def create_structure(
             self,
-            id: int, h: int, o: int, extra: List[str], periodicity: int,
+            upload_id: str, id: int, h: int, o: int, extra: List[str], periodicity: int,
             optimade: bool = True, metadata: dict = None):
         '''
         Creates a calculation in Elastic and Mongodb with the given properties.
@@ -423,8 +382,7 @@ class ExampleData:

         self.create_entry(
             entry_archive=archive,
-            domain='dft', calc_id='test_calc_id_%d' % id, upload_id='test_upload',
-            published=True, processed=True, with_embargo=False, **kwargs)
+            upload_id=upload_id, calc_id='test_calc_id_%d' % id, domain='dft', **kwargs)

     def create_template_upload_file(
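With the reworked `create_entry`, upload-level keys are inherited from the upload and may no longer be passed per entry; the assertion loop above guards against that. A short usage sketch, assuming the usual `test_user` fixture:

    from tests.utils import ExampleData

    def make_embargoed_upload(test_user):
        data = ExampleData(uploader=test_user)
        data.create_upload(upload_id='upload_1', published=True, embargo_length=12)
        # The entry inherits uploader, publish_time, license, and with_embargo=True:
        data.create_entry(upload_id='upload_1', calc_id='entry_1')
        # Passing upload-level keys per entry would now trip the assertion above:
        # data.create_entry(upload_id='upload_1', calc_id='entry_2', with_embargo=False)
        return data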