nomad-lab / nomad-FAIR / Commits

Commit 32e89d75 authored Mar 01, 2019 by Markus Scheidgen

Minor fixes.

parent f06f3e93
Pipeline #44422 passed with stages in 27 minutes and 21 seconds
Changes 5    Pipelines 1
nomad/coe_repo/upload.py
@@ -148,7 +148,7 @@ class Upload(Base):  # type: ignore
                 upload=coe_upload)
             repo_db.add(coe_calc)
             coe_calc.apply_calc_with_metadata(calc, context=context)
-            logger.info('added calculation, not yet committed', calc_id=coe_calc.calc_id)
+            logger.debug('added calculation, not yet committed', calc_id=coe_calc.calc_id)
 
         # commit
         if has_calcs:
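Note the keyword argument in these log calls: the context (calc_id=...) is passed as structured data rather than formatted into the message string. A minimal sketch of that style, assuming a structlog-backed logger (the actual logging backend is not shown in this diff):

```python
# Assumption: nomad's loggers behave like structlog bound loggers.
import structlog

logger = structlog.get_logger()
# Context goes in as keyword arguments and ends up in the structured event,
# e.g. {'event': 'added calculation, not yet committed', 'calc_id': 'abc123'}.
logger.debug('added calculation, not yet committed', calc_id='abc123')
```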
nomad/files.py
@@ -77,8 +77,11 @@ class PathObject:
         object_id: The object id (i.e. directory path)
         os_path: Override the "object storage" path with the given path.
         prefix: Add a 3-digit prefix directory, e.g. foo/test/ -> foo/tes/test
+        create_prefix: Create the prefix right away
     """
-    def __init__(self, bucket: str, object_id: str, os_path: str = None, prefix: bool = False) -> None:
+    def __init__(
+            self, bucket: str, object_id: str, os_path: str = None, prefix: bool = False,
+            create_prefix: bool = False) -> None:
         if os_path:
             self.os_path = os_path
         else:
@@ -91,6 +94,9 @@ class PathObject:
             segments.append(last)
             self.os_path = os.path.join(*segments)
 
+            if create_prefix:
+                os.makedirs(os.path.dirname(self.os_path), exist_ok=True)
+
     def delete(self) -> None:
         basename = os.path.basename(self.os_path)
         parent_directory = os.path.dirname(self.os_path)
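Together with the docstring above, these two hunks describe the object-storage layout: with prefix=True an object id like test is stored under a 3-character prefix directory (foo/test -> foo/tes/test), and the new create_prefix flag creates that prefix directory immediately. A minimal standalone sketch of the path logic, with a hypothetical helper name (not the actual PathObject code):

```python
import os

def prefixed_path(bucket: str, object_id: str, create_prefix: bool = False) -> str:
    # 3-character prefix directory as in the docstring example: foo/test -> foo/tes/test
    path = os.path.join(bucket, object_id[:3], object_id)
    if create_prefix:
        # what the new create_prefix flag does: create the prefix directory right away
        os.makedirs(os.path.dirname(path), exist_ok=True)
    return path

print(prefixed_path('foo', 'test'))  # foo/tes/test
```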
@@ -461,6 +467,8 @@ class StagingUploadFiles(UploadFiles):
         Arguments:
             bagit_metadata: Additional data added to the bagit metadata.
         """
+        self.logger.debug('started to pack upload')
+
         # freeze the upload
         assert not self.is_frozen, "Cannot pack an upload that is packed, or packing."
         with open(self._frozen_file.os_path, 'wt') as f:
@@ -472,6 +480,7 @@ class StagingUploadFiles(UploadFiles):
 
         # copy raw -> .restricted
         shutil.copytree(self._raw_dir.os_path, restricted_dir.os_path)
+        self.logger.debug('copied raw data')
 
         # We do a trick to deal with multiple mainfiles sharing the same aux files while
         # having different restriction, we first move all aux files to public (including
@@ -496,10 +505,12 @@ class StagingUploadFiles(UploadFiles):
             # file might not have been moved since all mainfiles among aux files were restricted
             if source.exists():
                 os.rename(source.os_path, restricted_dir.join_file(mainfile).os_path)
+        self.logger.debug('moved public data')
 
         # create bags
         make_bag(restricted_dir.os_path, bag_info=bagit_metadata, checksums=['sha512'])
         make_bag(public_dir.os_path, bag_info=bagit_metadata, checksums=['sha512'])
+        self.logger.debug('created raw file bags')
 
         # zip bags
         def zip_dir(zip_filepath, path):
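The body of zip_dir lies outside this hunk. As a rough sketch of what such a helper usually does (an assumption, not nomad's actual implementation), it walks the directory tree and writes every file into the zip with paths relative to the bagged directory:

```python
import os
from zipfile import ZipFile, ZIP_DEFLATED

def zip_dir(zip_filepath: str, path: str) -> None:
    # Hypothetical stand-in for the elided helper: zip a whole directory tree.
    root_len = len(path) + 1
    with ZipFile(zip_filepath, 'w', ZIP_DEFLATED) as zf:
        for root, _, files in os.walk(path):
            for name in files:
                file_path = os.path.join(root, name)
                zf.write(file_path, arcname=file_path[root_len:])
```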
@@ -514,6 +525,7 @@ class StagingUploadFiles(UploadFiles):
 
         zip_dir(packed_dir.join_file('raw-restricted.bagit.zip').os_path, restricted_dir.os_path)
         zip_dir(packed_dir.join_file('raw-public.bagit.zip').os_path, public_dir.os_path)
+        self.logger.debug('zipped bags')
 
         # zip archives
         def create_zipfile(prefix: str) -> ZipFile:
@@ -536,15 +548,20 @@ class StagingUploadFiles(UploadFiles):
 
         archive_restricted_zip.close()
         archive_public_zip.close()
+        self.logger.debug('zipped archives')
 
         # pack metadata
         packed_metadata = PublicMetadata(packed_dir.os_path)
         packed_metadata._create(self._metadata)
+        self.logger.debug('packed metadata')
 
         # move to public bucket
-        target_dir = DirectoryObject(config.files.public_bucket, self.upload_id, create=False, prefix=True)
+        target_dir = DirectoryObject(
+            config.files.public_bucket, self.upload_id, create=False, prefix=True,
+            create_prefix=True)
         assert not target_dir.exists()
-        shutil.move(packed_dir.os_path, target_dir.os_path)
+        os.rename(packed_dir.os_path, target_dir.os_path)
+        self.logger.debug('moved to public bucket')
 
     def raw_file_manifest(self, path_prefix: str = None) -> Generator[str, None, None]:
         upload_prefix_len = len(self._raw_dir.os_path) + 1
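Two of the changes in this hunk belong together: the packed directory is now moved with os.rename instead of shutil.move, and the target DirectoryObject is created with create_prefix=True so the destination's parent (the prefix directory) exists before the rename. A small self-contained sketch of that interplay, using made-up paths under a temporary directory (not nomad's real bucket layout):

```python
import os
import tempfile

base = tempfile.mkdtemp()
src = os.path.join(base, 'staging', 'packed')
dst = os.path.join(base, 'public', 'upl', 'upload_id')  # 'upl' plays the role of the prefix dir
os.makedirs(src)

# os.rename does not create missing parent directories and only works within one
# filesystem (shutil.move would fall back to copy-and-delete across filesystems).
# Creating the parent first is what create_prefix=True provides before the rename:
os.makedirs(os.path.dirname(dst), exist_ok=True)

os.rename(src, dst)
print(os.path.isdir(dst))  # True
```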
nomad/migration.py
@@ -572,7 +572,6 @@ class NomadCOEMigration:
         # initialize package report
         report = utils.POPO()
         report.total_source_calcs = 0
         report.total_calcs = 0
         report.failed_calcs = 0
         report.migrated_calcs = 0
@@ -704,7 +703,7 @@ class NomadCOEMigration:
         else:
             logger.info('no successful calcs, skip publish')
 
-        report.missing_calcs = report.total_source_calcs - report.migrated_calcs
+        report.missing_calcs = report.total_calcs - report.migrated_calcs
         logger.info('migrated package', **report)
 
         for key, value in report.items():
nomad/normalizing/system.py
@@ -302,7 +302,7 @@ class SystemNormalizer(SystemBasedNormalizer):
                 nomad_classification = SystemNormalizer.translation_dict[matid_class]
                 break
         # Check to make sure a match was found in translating classes.
-        if (nomad_classification is None) or (nomad_classification == 'Unknown'):
+        if nomad_classification is None:
             # Then something unexpected has happened with our system_type.
             self.logger.error(
                 'Matid classfication has given us an unexpected type: %s' % system_type)
@@ -310,4 +310,7 @@ class SystemNormalizer(SystemBasedNormalizer):
         if nomad_classification == 'Atom' and (atoms.get_number_of_atoms() > 1):
             nomad_classification = 'Molecule / Cluster'
 
+        if nomad_classification == 'Unknown':
+            self.logger.warning('Could not determine system type.')
+
         return nomad_classification
nomad/processing/data.py
@@ -610,7 +610,7 @@ class Upload(Proc):
             calc_metadata.update(upload_metadata)
             calc_with_metadata.apply_user_metadata(calc_metadata)
-            logger.info('prepared calc with metadata', calc_id=calc_with_metadata.calc_id)
+            logger.debug('prepared calc with metadata', calc_id=calc_with_metadata.calc_id)
 
             return calc_with_metadata
 
         result.calcs = [apply_metadata(calc) for calc in self.calcs]