diff --git a/nomad/app/flask/api/auth.py b/nomad/app/flask/api/auth.py index 751f62190d77342b3151f29a971ddf704dd7c7c4..6962090a7c0a36dc30b3b479182f88f083932d12 100644 --- a/nomad/app/flask/api/auth.py +++ b/nomad/app/flask/api/auth.py @@ -312,7 +312,7 @@ def has_read_access(upload_id: str, calc_id: str = None): try: upload = processing.Upload.get(upload_id) - if upload.published and upload.embargo_length == 0: + if upload.published and not upload.with_embargo: return True if g.user is None: diff --git a/nomad/app/v1/routers/uploads.py b/nomad/app/v1/routers/uploads.py index c6b6f4866745fbfbeeb868bf91756f2ce9f62d55..016c82d33fde788ed8fabb9d124e751b67782bb7 100644 --- a/nomad/app/v1/routers/uploads.py +++ b/nomad/app/v1/routers/uploads.py @@ -91,6 +91,14 @@ class UploadProcData(ProcData): published_to: List[str] = Field( None, description='A list of other NOMAD deployments that this upload was uploaded to already.') + publish_time: Optional[datetime] = Field( None, description='Date and time of publication, if the upload has been published.') + with_embargo: bool = Field( description='If the upload has an embargo set (embargo_length not equal to zero).') + embargo_length: int = Field( description='The length of the requested embargo, in months. 
0 if no embargo is requested.') + license: str = Field( + description='The license under which this upload is distributed.') last_status_message: Optional[str] = Field( None, description='The last informative message that the processing saved about this uploads status.') @@ -1379,7 +1387,7 @@ def _get_upload_with_read_access(upload_id: str, user: User, include_others: boo if not upload.published: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=strip(''' You do not have access to the specified upload - not published yet.''')) - if upload.published and upload.embargo_length > 0: + if upload.published and upload.with_embargo: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=strip(''' You do not have access to the specified upload - published with embargo.''')) return upload diff --git a/nomad/cli/admin/admin.py b/nomad/cli/admin/admin.py index a2f09cb1c34ed559dad36f7e98af48402d8b35d0..b0531a0aeef71e457e10ec21fc5899598a8f5c81 100644 --- a/nomad/cli/admin/admin.py +++ b/nomad/cli/admin/admin.py @@ -320,7 +320,7 @@ def index_materials(threads, code, dry, in_place, n, source): calc.upload_id = metadata["upload_id"] mongo_calc = proc.Calc.get(calc.calc_id) calc.published = mongo_calc.upload.published - calc.with_embargo = mongo_calc.upload.embargo_length > 0 + calc.with_embargo = mongo_calc.upload.with_embargo calc.owners = [mongo_calc.upload.user_id] + mongo_calc["metadata"]["shared_with"] enc_idealized_structure = encyclopedia.material.idealized_structure idealized_structure = IdealizedStructure() diff --git a/nomad/datamodel/datamodel.py b/nomad/datamodel/datamodel.py index 31072f92dcea6d2862b031e8a28332d680c58d97..e53f49e1cdf819a3c23288eea29f8f7b354b984d 100644 --- a/nomad/datamodel/datamodel.py +++ b/nomad/datamodel/datamodel.py @@ -598,7 +598,7 @@ class EntryMetadata(metainfo.MSection): license of this entry. 
''', default='CC BY 4.0', - categories=[MongoMetadata, EditableUserMetadata]) + categories=[EditableUserMetadata]) with_embargo = metainfo.Quantity( type=bool, default=False, diff --git a/nomad/processing/data.py b/nomad/processing/data.py index 083e48502c95d834c721765450144ef02b7ec395..91f7f5a3c6efa97946f6bced51c6e7e363faecd6 100644 --- a/nomad/processing/data.py +++ b/nomad/processing/data.py @@ -297,7 +297,8 @@ class Calc(Proc): entry_metadata.upload_time = upload.upload_time entry_metadata.upload_name = upload.name entry_metadata.published = upload.published - entry_metadata.with_embargo = (upload.embargo_length > 0) + entry_metadata.with_embargo = upload.with_embargo + entry_metadata.license = upload.license # Entry metadata entry_metadata.parser_name = self.parser_name if self.parser_name is not None: @@ -771,20 +772,20 @@ class Upload(Proc): id_field = 'upload_id' upload_id = StringField(primary_key=True) - pending_operations = ListField(DictField(), default=[]) - embargo_length = IntField(default=0, required=True) - name = StringField(default=None) upload_time = DateTimeField() user_id = StringField(required=True) - publish_time = DateTimeField() last_update = DateTimeField() + publish_time = DateTimeField() + embargo_length = IntField(default=0, required=True) + license = StringField(default='CC BY 4.0', required=True) - publish_directly = BooleanField(default=False) from_oasis = BooleanField(default=False) oasis_deployment_id = StringField(default=None) published_to = ListField(StringField()) + publish_directly = BooleanField(default=False) + pending_operations = ListField(DictField(), default=[]) joined = BooleanField(default=False) meta: Any = { @@ -834,6 +835,10 @@ class Upload(Proc): def published(self) -> bool: return self.publish_time is not None + @property + def with_embargo(self) -> bool: + return self.embargo_length > 0 + def get_logger(self, **kwargs): logger = super().get_logger() user = self.uploader @@ -967,7 +972,7 @@ class Upload(Proc): 
with self.entries_metadata() as entries: if isinstance(self.upload_files, StagingUploadFiles): with utils.timer(logger, 'staged upload files packed'): - self.staging_upload_files.pack(entries, with_embargo=(self.embargo_length > 0)) + self.staging_upload_files.pack(entries, with_embargo=self.with_embargo) with utils.timer(logger, 'index updated'): search.publish(entries) @@ -994,7 +999,7 @@ class Upload(Proc): 'Only published uploads can be published to the central NOMAD.' assert config.oasis.central_nomad_deployment_id not in self.published_to, \ 'Upload is already published to the central NOMAD.' - assert self.embargo_length == 0, 'Upload must not be under embargo' + assert not self.with_embargo, 'Upload must not be under embargo' from nomad.cli.client.client import _create_client as create_client central_nomad_client = create_client( @@ -1483,7 +1488,7 @@ class Upload(Proc): with utils.timer(logger, 'staged upload files re-packed'): self.staging_upload_files.pack( self.entries_mongo_metadata(), - with_embargo=(self.embargo_length > 0), + with_embargo=self.with_embargo, create=False, include_raw=False) self._cleanup_staging_files() @@ -1497,7 +1502,7 @@ class Upload(Proc): with utils.lnr(logger, 'publish failed'): with self.entries_metadata() as calcs: with utils.timer(logger, 'upload staging files packed'): - self.staging_upload_files.pack(calcs, with_embargo=(self.embargo_length > 0)) + self.staging_upload_files.pack(calcs, with_embargo=self.with_embargo) with utils.timer(logger, 'upload staging files deleted'): self.staging_upload_files.delete() @@ -1643,7 +1648,7 @@ class Upload(Proc): need_to_reindex = True if upload_metadata.embargo_length is not None: assert 0 <= upload_metadata.embargo_length <= 36, 'Invalid `embargo_length`, must be between 0 and 36 months' - if self.published and (self.embargo_length > 0) != (upload_metadata.embargo_length > 0): + if self.published and self.with_embargo != (upload_metadata.embargo_length > 0): need_to_repack = True 
need_to_reindex = True self.embargo_length = upload_metadata.embargo_length @@ -1658,7 +1663,7 @@ class Upload(Proc): self.save() if need_to_repack: - PublicUploadFiles(self.upload_id).re_pack(with_embargo=self.embargo_length > 0) + PublicUploadFiles(self.upload_id).re_pack(with_embargo=self.with_embargo) if need_to_reindex and self.total_calcs > 0: # Update entries and elastic search @@ -1807,7 +1812,7 @@ class Upload(Proc): 'export_options.include_archive_files', 'export_options.include_datasets', 'upload._id', 'upload.user_id', - 'upload.create_time', 'upload.upload_time', 'upload.process_status', + 'upload.create_time', 'upload.upload_time', 'upload.process_status', 'upload.license', 'upload.embargo_length', 'entries') required_keys_entry_level = ( @@ -1843,7 +1848,7 @@ class Upload(Proc): assert bundle_info['entries'], 'Upload published but no entries in bundle_info.json' # Define which keys we think okay to copy from the bundle upload_keys_to_copy = [ - 'name', 'embargo_length', 'from_oasis', 'oasis_deployment_id'] + 'name', 'embargo_length', 'license', 'from_oasis', 'oasis_deployment_id'] if settings.keep_original_timestamps: upload_keys_to_copy.extend(('create_time', 'upload_time', 'publish_time',)) try: @@ -1952,7 +1957,7 @@ class Upload(Proc): if self.published and embargo_length is not None: # Repack the upload - PublicUploadFiles(self.upload_id).re_pack(with_embargo=self.embargo_length > 0) + PublicUploadFiles(self.upload_id).re_pack(with_embargo=self.with_embargo) # Check the archive metadata, if included if settings.include_archive_files: diff --git a/tests/app/v1/routers/test_uploads.py b/tests/app/v1/routers/test_uploads.py index 5e5cd165d7e28bd6984093ade13ae85d1e66f266..45b77fdeef022b88373c442bd7e2d389dbf5cd7c 100644 --- a/tests/app/v1/routers/test_uploads.py +++ b/tests/app/v1/routers/test_uploads.py @@ -179,6 +179,13 @@ def assert_upload(response_json, **kwargs): assert 'upload_id' in response_json assert 'upload_id' in data assert 'create_time' 
in data + assert 'published' in data + assert 'with_embargo' in data + assert 'embargo_length' in data + assert 'license' in data + assert (data['embargo_length'] > 0) == data['with_embargo'] + if data['published']: + assert 'publish_time' in data for key, value in kwargs.items(): assert data.get(key, None) == value @@ -311,7 +318,7 @@ def get_upload_entries_metadata(entries: List[Dict[str, Any]]) -> Iterable[Entry return [ EntryMetadata( domain='dft', calc_id=entry['entry_id'], mainfile=entry['mainfile'], - with_embargo=Upload.get(entry['upload_id']).embargo_length > 0) + with_embargo=Upload.get(entry['upload_id']).with_embargo) for entry in entries] @@ -985,7 +992,7 @@ def test_put_upload_metadata( pass if upload_id == 'id_published_w': - assert Upload.get(upload_id).embargo_length > 0 + assert Upload.get(upload_id).with_embargo es_data = search(owner=None, query=dict(entry_id='id_published_w_entry')).data[0] assert es_data['with_embargo'] @@ -1024,7 +1031,7 @@ def test_put_upload_metadata( assert datetime.fromisoformat(es_data['upload_time']) == upload.upload_time if 'embargo_length' in query_args: assert upload.embargo_length == query_args['embargo_length'] - assert entry_metadata.with_embargo == es_data['with_embargo'] == (upload.embargo_length > 0) + assert entry_metadata.with_embargo == es_data['with_embargo'] == upload.with_embargo @pytest.mark.parametrize('mode, source_path, query_args, user, use_upload_token, test_limit, accept_json, expected_status_code', [ diff --git a/tests/processing/test_data.py b/tests/processing/test_data.py index 0138402aafbfa2b220882c43b898838611c70b8f..925dd37b14671478ee16212eb68d34138c46a3b4 100644 --- a/tests/processing/test_data.py +++ b/tests/processing/test_data.py @@ -526,14 +526,14 @@ def test_re_process_match(non_empty_processed, published, monkeypatch, no_warn): assert upload.total_calcs == 2 if not published: assert upload.published == published - assert upload.embargo_length == 0 + assert not upload.with_embargo def 
test_re_pack(published: Upload): upload_id = published.upload_id upload_files: PublicUploadFiles = published.upload_files # type: ignore assert upload_files.access == 'restricted' - assert published.embargo_length > 0 + assert published.with_embargo calc = Calc.objects(upload_id=upload_id).first() # Lift embargo @@ -750,7 +750,7 @@ def test_set_upload_metadata(proc_infra, test_users_dict, user, metadata_to_set, assert entry_metadata.upload_time == upload.upload_time if 'embargo_length' in metadata_to_set: assert upload.embargo_length == metadata_to_set['embargo_length'] - assert entry_metadata.with_embargo == (upload.embargo_length > 0) + assert entry_metadata.with_embargo == upload.with_embargo def test_skip_matching(proc_infra, test_user): diff --git a/tests/test_cli.py b/tests/test_cli.py index 2a7a9e2c6316c53fd201e15eb8f43d1ab0d52c4e..eae32088302f6d9322ae5f6b05ed6f3117105089 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -105,7 +105,7 @@ class TestAdmin: calc = Calc.objects(upload_id=upload_id).first() assert published.upload_files.exists() - assert published.embargo_length > 0 + assert published.with_embargo assert search.SearchRequest().owner('public').search_parameter('upload_id', upload_id).execute()['total'] == 0 result = click.testing.CliRunner().invoke( @@ -114,7 +114,7 @@ class TestAdmin: assert result.exit_code == 0 published.block_until_complete() - assert not (published.embargo_length > 0) == lifted + assert not published.with_embargo == lifted assert (search.SearchRequest().owner('public').search_parameter('upload_id', upload_id).execute()['total'] > 0) == lifted if lifted: with files.UploadFiles.get(upload_id=upload_id).read_archive(calc_id=calc.calc_id) as archive: @@ -242,7 +242,7 @@ class TestAdminUploads: def test_re_pack(self, published, monkeypatch): upload_id = published.upload_id calc = Calc.objects(upload_id=upload_id).first() - assert published.embargo_length > 0 + assert published.with_embargo published.embargo_length = 0 
published.save() diff --git a/tests/test_files.py b/tests/test_files.py index 7cb41d295ec7844df3a231b99d0c263bdf7c4f09..1b93c487f66a0228826f83bc833dc37b65d28aa9 100644 --- a/tests/test_files.py +++ b/tests/test_files.py @@ -384,7 +384,7 @@ def create_public_upload( _, entries, upload_files = create_staging_upload(upload_id, calc_specs, embargo_length) - upload_files.pack(entries, with_embargo=(embargo_length > 0)) + upload_files.pack(entries, with_embargo=embargo_length > 0) upload_files.delete() if with_upload: upload = Upload.get(upload_id)