nomad-lab / nomad-FAIR

Commit 14cb7dee
Authored Jan 12, 2019 by Markus Scheidgen
Parent: e63f46b1
18 changed files

Removed upload_hash and replaced by upload_id.
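In practice the rename means that the calc and raw-file routes, log contexts, and repository lookups in the diffs below are keyed by the upload's id instead of a content-derived hash. A minimal client-side sketch of the effect, based on the /raw route visible in nomad/api/raw.py and nomad/client.py further down (the base URL and identifiers are placeholders, not from this commit):

    import requests

    api_base = 'http://localhost/nomad/api'  # placeholder base URL
    upload_id = '<upload-id>'                # previously addressed by upload_hash

    # After this commit, raw files of an upload are addressed by its id;
    # a trailing '*' requests a whole directory (see raw.py below).
    resp = requests.get('%s/raw/%s/some/dir/*' % (api_base, upload_id), stream=True)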
nomad/api/app.py

@@ -95,7 +95,7 @@ def with_logger(func):
         args = inspect.getcallargs(wrapper, *args, **kwargs)
         logger_args = {
             k: v for k, v in args.items()
-            if k in ['upload_id', 'upload_hash', 'calc_hash']}
+            if k in ['upload_id', 'calc_hash']}
         logger = utils.get_logger(__name__, **logger_args)
         args.update(logger=logger)
         try:
nomad/api/archive.py

@@ -42,19 +42,19 @@ class ArchiveCalcLogResource(Resource):
     @api.response(401, 'Not authorized to access the data.')
     @api.response(200, 'Archive data send', headers={'Content-Type': 'application/plain'})
     @login_if_available
-    def get(self, upload_hash, calc_hash):
+    def get(self, upload_id, calc_hash):
         """
         Get calculation processing log.

-        Calcs are references via *upload_hash*, *calc_hash* pairs.
+        Calcs are references via *upload_id*, *calc_hash* pairs.
         """
-        archive_id = '%s/%s' % (upload_hash, calc_hash)
+        archive_id = '%s/%s' % (upload_id, calc_hash)

         upload_files = UploadFiles.get(
-            upload_hash, is_authorized=create_authorization_predicate(upload_hash, calc_hash))
+            upload_id, is_authorized=create_authorization_predicate(upload_id, calc_hash))

         if upload_files is None:
-            abort(404, message='Archive %s does not exist.' % upload_hash)
+            abort(404, message='Upload %s does not exist.' % upload_id)

         try:
             return send_file(
@@ -63,7 +63,7 @@ class ArchiveCalcLogResource(Resource):
                 as_attachment=True,
                 attachment_filename='%s.log' % archive_id)
         except Restricted:
-            abort(401, message='Not authorized to access %s/%s.' % (upload_hash, calc_hash))
+            abort(401, message='Not authorized to access %s/%s.' % (upload_id, calc_hash))
         except KeyError:
             abort(404, message='Calculation %s does not exist.' % archive_id)
@@ -75,19 +75,19 @@ class ArchiveCalcResource(Resource):
     @api.response(401, 'Not authorized to access the data.')
     @api.response(200, 'Archive data send')
     @login_if_available
-    def get(self, upload_hash, calc_hash):
+    def get(self, upload_id, calc_hash):
         """
         Get calculation data in archive form.

-        Calcs are references via *upload_hash*, *calc_hash* pairs.
+        Calcs are references via *upload_id*, *calc_hash* pairs.
         """
-        archive_id = '%s/%s' % (upload_hash, calc_hash)
+        archive_id = '%s/%s' % (upload_id, calc_hash)

         upload_file = UploadFiles.get(
-            upload_hash, is_authorized=create_authorization_predicate(upload_hash, calc_hash))
+            upload_id, is_authorized=create_authorization_predicate(upload_id, calc_hash))

         if upload_file is None:
-            abort(404, message='Archive %s does not exist.' % upload_hash)
+            abort(404, message='Archive %s does not exist.' % upload_id)

         try:
             return send_file(
@@ -96,7 +96,7 @@ class ArchiveCalcResource(Resource):
                 as_attachment=True,
                 attachment_filename='%s.json' % archive_id)
         except Restricted:
-            abort(401, message='Not authorized to access %s/%s.' % (upload_hash, calc_hash))
+            abort(401, message='Not authorized to access %s/%s.' % (upload_id, calc_hash))
         except KeyError:
             abort(404, message='Calculation %s does not exist.' % archive_id)
nomad/api/auth.py

@@ -149,7 +149,7 @@ class TokenResource(Resource):
             'there is no token for you.')

-def create_authorization_predicate(upload_hash, calc_hash=None):
+def create_authorization_predicate(upload_id, calc_hash=None):
     """
     Returns a predicate that determines if the logged in user has the authorization
     to access the given upload and calculation.
@@ -160,18 +160,18 @@ def create_authorization_predicate(upload_hash, calc_hash=None):
             return False

         # look in repository
-        upload = coe_repo.Upload.from_upload_hash(upload_hash)
+        upload = coe_repo.Upload.from_upload_id(upload_id)
         if upload is not None:
             return upload.user_id == g.user.user_id

         # look in staging
-        staging_upload = processing.Upload.get(upload_hash)
+        staging_upload = processing.Upload.get(upload_id)
         if staging_upload is not None:
             return str(g.user.user_id) == str(staging_upload.user_id)

         # There are no db entries for the given resource
-        if files.UploadFiles.get(upload_hash) is not None:
-            logger = utils.get_logger(__name__, upload_hash=upload_hash, calc_hash=calc_hash)
+        if files.UploadFiles.get(upload_id) is not None:
+            logger = utils.get_logger(__name__, upload_id=upload_id, calc_hash=calc_hash)
             logger.error('Upload files without respective db entry')
             raise KeyError
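The renamed predicate is consumed unchanged by the file-access layer. A condensed sketch of the call-site pattern, mirroring the archive.py hunks above and the raw.py hunks below (nomad's own modules are assumed importable; the helper name is illustrative, not from this commit):

    from nomad.files import UploadFiles
    from nomad.api.auth import create_authorization_predicate

    def open_upload_files(upload_id, calc_hash=None):
        # UploadFiles applies the predicate when restricted data is accessed;
        # see the Restricted handling in archive.py and raw.py.
        upload_files = UploadFiles.get(
            upload_id, is_authorized=create_authorization_predicate(upload_id, calc_hash))
        if upload_files is None:
            raise KeyError('Upload %s does not exist.' % upload_id)
        return upload_files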
nomad/api/common.py

@@ -45,10 +45,10 @@ pagination_request_parser.add_argument(
 def calc_route(ns, prefix: str = ''):
     """ A resource decorator for /<upload>/<calc> based routes. """
     def decorator(func):
-        ns.route('%s/<string:upload_hash>/<string:calc_hash>' % prefix)(
+        ns.route('%s/<string:upload_id>/<string:calc_hash>' % prefix)(
             api.doc(params={
-                'upload_hash': 'The unique hash for the requested upload.',
-                'calc_hash': 'The unique hash for the requested calculation.'
+                'upload_id': 'The unique id for the requested upload.',
+                'calc_hash': 'The upload unique hash for the requested calculation.'
             })(func)
         )
     return decorator
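For orientation, calc_route is what gives resources like ArchiveCalcLogResource and RepoCalcResource their two-segment URLs, so this hunk is the origin of the upload_id/calc_hash signature changes elsewhere in the commit. A schematic sketch of typical registration (the namespace and the '/logs' prefix are assumptions and are not shown in this commit; Resource is the REST framework base class used throughout these modules):

    @calc_route(ns, '/logs')        # hypothetical prefix
    class ArchiveCalcLogResource(Resource):
        def get(self, upload_id, calc_hash):   # route params now named upload_id / calc_hash
            ...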
nomad/api/raw.py

@@ -38,9 +38,9 @@ raw_file_from_path_parser = api.parser()
 raw_file_from_path_parser.add_argument(**raw_file_compress_argument)

-@ns.route('/<string:upload_hash>/<path:path>')
+@ns.route('/<string:upload_id>/<path:path>')
 @api.doc(params={
-    'upload_hash': 'The unique hash for the requested upload.',
+    'upload_id': 'The unique hash for the requested upload.',
     'path': 'The path to a file or directory.'
 })
 @api.header('Content-Type', 'application/gz')
@@ -51,7 +51,7 @@ class RawFileFromPathResource(Resource):
     @api.response(200, 'File(s) send', headers={'Content-Type': 'application/gz'})
     @api.expect(raw_file_from_path_parser, validate=True)
     @login_if_available
-    def get(self, upload_hash: str, path: str):
+    def get(self, upload_id: str, path: str):
         """
         Get a single raw calculation file or whole directory from a given upload.
@@ -63,9 +63,9 @@ class RawFileFromPathResource(Resource):
         upload_filepath = path

         upload_files = UploadFiles.get(
-            upload_hash, create_authorization_predicate(upload_hash))
+            upload_id, create_authorization_predicate(upload_id))
         if upload_files is None:
-            abort(404, message='The upload with hash %s does not exist.' % upload_hash)
+            abort(404, message='The upload with hash %s does not exist.' % upload_id)

         if upload_filepath[-1:] == '*':
             upload_filepath = upload_filepath[0:-1]
@@ -74,7 +74,7 @@ class RawFileFromPathResource(Resource):
                 abort(404, message='There are no files for %s.' % upload_filepath)
             else:
                 compress = request.args.get('compress', None) is not None
-                return respond_to_get_raw_files(upload_hash, files, compress)
+                return respond_to_get_raw_files(upload_id, files, compress)

         try:
             return send_file(
@@ -83,7 +83,7 @@ class RawFileFromPathResource(Resource):
                 as_attachment=True,
                 attachment_filename=os.path.basename(upload_filepath))
         except Restricted:
-            abort(401, message='Not authorized to access upload %s.' % upload_hash)
+            abort(401, message='Not authorized to access upload %s.' % upload_id)
         except KeyError:
             files = list(file for file in upload_files.raw_file_manifest(upload_filepath))
             if len(files) == 0:
@@ -106,9 +106,9 @@ raw_files_request_parser.add_argument(
     'files', required=True, type=str, help='Comma separated list of files to download.', location='args')

-@ns.route('/<string:upload_hash>')
+@ns.route('/<string:upload_id>')
 @api.doc(params={
-    'upload_hash': 'The unique hash for the requested upload.'
+    'upload_id': 'The unique hash for the requested upload.'
 })
 class RawFilesResource(Resource):
     @api.doc('get_files')
@@ -116,7 +116,7 @@ class RawFilesResource(Resource):
     @api.response(200, 'File(s) send', headers={'Content-Type': 'application/gz'})
     @api.expect(raw_files_request_model, validate=True)
     @login_if_available
-    def post(self, upload_hash):
+    def post(self, upload_id):
         """
         Download multiple raw calculation files in a .zip file.
         Zip files are streamed; instead of 401 errors, the zip file will just not contain
@@ -126,14 +126,14 @@ class RawFilesResource(Resource):
         compress = json_data.get('compress', False)
         files = [file.strip() for file in json_data['files']]

-        return respond_to_get_raw_files(upload_hash, files, compress)
+        return respond_to_get_raw_files(upload_id, files, compress)

     @api.doc('get_files_alternate')
     @api.response(404, 'The upload or path does not exist')
     @api.response(200, 'File(s) send', headers={'Content-Type': 'application/gz'})
     @api.expect(raw_files_request_parser, validate=True)
     @login_if_available
-    def get(self, upload_hash):
+    def get(self, upload_id):
         """
         Download multiple raw calculation files.
         Download multiple raw calculation files in a .zip file.
@@ -147,14 +147,14 @@ class RawFilesResource(Resource):
             abort(400, message="No files argument given.")
         files = [file.strip() for file in files_str.split(',')]

-        return respond_to_get_raw_files(upload_hash, files, compress)
+        return respond_to_get_raw_files(upload_id, files, compress)

-def respond_to_get_raw_files(upload_hash, files, compress=False):
+def respond_to_get_raw_files(upload_id, files, compress=False):
     upload_files = UploadFiles.get(
-        upload_hash, create_authorization_predicate(upload_hash))
+        upload_id, create_authorization_predicate(upload_id))
     if upload_files is None:
-        abort(404, message='The upload with hash %s does not exist.' % upload_hash)
+        abort(404, message='The upload with hash %s does not exist.' % upload_id)

     def generator():
         """ Stream a zip file with all files using zipstream. """
@@ -188,5 +188,5 @@ def respond_to_get_raw_files(upload_hash, files, compress=False):
             yield chunk

     response = Response(stream_with_context(generator()), mimetype='application/zip')
-    response.headers['Content-Disposition'] = 'attachment; filename={}'.format('%s.zip' % upload_hash)
+    response.headers['Content-Disposition'] = 'attachment; filename={}'.format('%s.zip' % upload_id)
     return response
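The body of generator() is collapsed in this view. A minimal sketch of what a zipstream-based implementation typically looks like (assumed, not part of this commit; compress, files and upload_files are the enclosing function's variables, and upload_files.raw_file is a placeholder for however the raw files are opened):

    import zipstream  # python-zipstream

    def generator():
        """ Stream a zip file with all files using zipstream. """
        def read_chunks(f, chunk_size=64 * 1024):
            # Yield the file contents in chunks so the zip can be streamed.
            while True:
                data = f.read(chunk_size)
                if not data:
                    break
                yield data

        zip_file = zipstream.ZipFile(
            mode='w',
            compression=zipstream.ZIP_DEFLATED if compress else zipstream.ZIP_STORED)
        for filename in files:
            zip_file.write_iter(filename, read_chunks(upload_files.raw_file(filename)))
        for chunk in zip_file:
            yield chunk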
nomad/api/repo.py

@@ -35,19 +35,19 @@ class RepoCalcResource(Resource):
     @api.response(404, 'The upload or calculation does not exist')
     @api.response(200, 'Metadata send')
     @api.doc('get_repo_calc')
-    def get(self, upload_hash, calc_hash):
+    def get(self, upload_id, calc_hash):
         """
         Get calculation metadata in repository form.

         Repository metadata only entails the quanties shown in the repository.
         This is basically the elastic search index entry for the
-        requested calculations. Calcs are references via *upload_hash*, *calc_hash*
+        requested calculations. Calcs are references via *upload_id*, *calc_hash*
         pairs.
         """
         try:
-            return RepoCalc.get(id='%s/%s' % (upload_hash, calc_hash)).json_dict, 200
+            return RepoCalc.get(id='%s/%s' % (upload_id, calc_hash)).json_dict, 200
         except NotFoundError:
-            abort(404, message='There is no calculation for %s/%s' % (upload_hash, calc_hash))
+            abort(404, message='There is no calculation for %s/%s' % (upload_id, calc_hash))
         except Exception as e:
             abort(500, message=str(e))
nomad/api/upload.py

@@ -55,12 +55,7 @@ upload_model = api.inherit('UploadProcessing', proc_model, {
         description='The name of the upload. This can be provided during upload '
                     'using the name query parameter.'),
     'upload_id': fields.String(
-        description='The unique id for the upload. Its a random uuid and '
-                    'and used within nomad as long as no upload_hash is available.'),
-    'upload_hash': fields.String(
-        description='The unique upload hash. It is based on the uploaded content and '
-                    'used within nomad to identify uploads.'),
+        description='The unique id for the upload.'),
     'additional_metadata': fields.Arbitrary,
     'local_path': fields.String,
     'upload_time': fields.DateTime(dt_format='iso8601'),
nomad/client.py

@@ -143,7 +143,7 @@ class CalcProcReproduction:
     """
     def __init__(self, archive_id: str, override: bool = False) -> None:
         self.calc_hash = utils.archive.calc_hash(archive_id)
-        self.upload_hash = utils.archive.upload_hash(archive_id)
+        self.upload_id = utils.archive.upload_id(archive_id)
         self.mainfile = None
         self.parser = None
         self.logger = utils.get_logger(__name__, archive_id=archive_id)
@@ -156,7 +156,7 @@ class CalcProcReproduction:
         # download with request, since bravado does not support streaming
         # TODO currently only downloads mainfile
         self.logger.info('Downloading calc.')
-        req = requests.get('%s/raw/%s/%s' % (api_base, self.upload_hash, os.path.dirname(self.mainfile)), stream=True)
+        req = requests.get('%s/raw/%s/%s' % (api_base, self.upload_id, os.path.dirname(self.mainfile)), stream=True)
         with open(local_path, 'wb') as f:
             for chunk in req.iter_content(chunk_size=1024):
                 f.write(chunk)
nomad/coe_repo/upload.py

@@ -89,7 +89,7 @@ class UploadMetaData:
 class Upload(Base, datamodel.Upload):  # type: ignore
     __tablename__ = 'uploads'

-    upload_id = Column(Integer, primary_key=True, autoincrement=True)
+    coe_upload_id = Column('upload_id', Integer, primary_key=True, autoincrement=True)
     upload_name = Column(String)
     user_id = Column(Integer, ForeignKey('users.user_id'))
     is_processed = Column(Boolean)
@@ -100,17 +100,17 @@ class Upload(Base, datamodel.Upload):  # type: ignore
     @classmethod
     def load_from(cls, obj):
-        return Upload.from_upload_hash(obj.upload_hash)
+        return Upload.from_upload_id(obj.upload_id)

     @staticmethod
-    def from_upload_hash(upload_hash) -> 'Upload':
+    def from_upload_id(upload_id) -> 'Upload':
         repo_db = infrastructure.repository_db
-        uploads = repo_db.query(Upload).filter_by(upload_name=upload_hash)
+        uploads = repo_db.query(Upload).filter_by(upload_name=upload_id)
         assert uploads.count() <= 1, 'Upload hash/name must be unique'
         return uploads.first()

     @property
-    def upload_hash(self):
+    def upload_id(self):
         return self.upload_name

     @property
@@ -140,17 +140,14 @@ class Upload(Base, datamodel.Upload):  # type: ignore
         repo_db = infrastructure.repository_db
         repo_db.begin()

         logger = utils.get_logger(
-            __name__, upload_id=upload.upload_id, upload_hash=upload.upload_hash)
+            __name__, upload_id=upload.upload_id)

         result = None
         try:
             # create upload
             coe_upload = Upload(
-                upload_name=upload.upload_hash,
+                upload_name=upload.upload_id,
                 created=meta_data.get('_upload_time', upload.upload_time),
                 user=upload.uploader,
                 is_processed=True)
nomad/datamodel.py

@@ -76,7 +76,6 @@ class Upload(Entity):
     Attributes:
         upload_id(str): The unique random id that each upload has
-        upload_hash(str): The hash/checksum that describes unique uploads
         upload_time(datatime): The upload time
         uploader(repo.User): The user that uploaded this upload
         calcs(Iterable[Calc]): An iterable over the calculations of this upload
@@ -85,10 +84,6 @@ class Upload(Entity):
     def upload_id(self) -> str:
         return '<not assigned>'

-    @property
-    def upload_hash(self) -> str:
-        raise NotImplementedError
-
     @property
     def upload_time(self) -> Type[datetime.datetime]:
         raise NotImplementedError
nomad/files.py

@@ -585,10 +585,6 @@ class StagingUploadFiles(UploadFiles):
         return utils.websave_hash(hash.digest(), utils.default_hash_len)

-    def upload_hash(self) -> str:
-        """ Returns: A hash for the whole upload. It is only available if upload *is_bag*. """
-        pass
-
 class ArchiveBasedStagingUploadFiles(StagingUploadFiles):
     """
nomad/processing/data.py

@@ -100,15 +100,10 @@ class Calc(Proc, datamodel.Calc):
             self._upload_files = ArchiveBasedStagingUploadFiles(
                 self.upload_id, is_authorized=lambda: True, local_path=self.upload.local_path)
         return self._upload_files

-    @property
-    def upload_hash(self):
-        return utils.archive.upload_hash(self.archive_id)
-
     def get_logger(self, **kwargs):
         logger = super().get_logger()
         logger = logger.bind(
             upload_id=self.upload_id, mainfile=self.mainfile,
-            calc_hash=self.calc_hash, upload_hash=self.upload_hash,
+            calc_hash=self.calc_hash,
             archive_id=self.archive_id, **kwargs)
         return logger
@@ -240,7 +235,7 @@ class Calc(Proc, datamodel.Calc):
     def archiving(self):
         logger = self.get_logger()

-        upload_hash, calc_hash = self.archive_id.split('/')
+        _, calc_hash = self.archive_id.split('/')
         additional = dict(
             mainfile=self.mainfile,
             upload_time=self.upload.upload_time,
@@ -254,7 +249,6 @@ class Calc(Proc, datamodel.Calc):
         repo_calc = RepoCalc.create_from_backend(
             self._parser_backend,
             additional=additional,
-            upload_hash=upload_hash,
             calc_hash=calc_hash,
             upload_id=self.upload_id)
         repo_calc.persist()
@@ -292,7 +286,6 @@ class Upload(Chord, datamodel.Upload):
         upload_id: the upload id generated by the database
         is_private: true if the upload and its derivitaves are only visible to the uploader
is_private: true if the upload and its derivitaves are only visible to the uploader