nomad-lab / nomad-FAIR · Commit e0ca6a9d

Merge branch 'v0.8.0-bugfixes' into 'v0.8.0'

V0.8.0 bugfixes. See merge request !109

Authored May 18, 2020 by Markus Scheidgen
Parents: 5aa6b5c2, cbce84c9
Pipeline #75115 passed with stages in 20 minutes and 9 seconds
Changes: 26 · Pipelines: 1
.gitignore
@@ -26,3 +26,4 @@ nomad.yaml
 build/
 dist/
 setup.json
+parser.osio.log

examples/client.py
@@ -3,7 +3,7 @@ from nomad.client import ArchiveQuery
 from nomad.metainfo import units

 # this will not be necessary, once this is the official NOMAD version
-config.client.url = 'https://labdev-nomad.esc.rzg.mpg.de/fairdi/nomad/testing-major/api'
+config.client.url = 'http://labdev-nomad.esc.rzg.mpg.de/fairdi/nomad/testing-major/api'

 query = ArchiveQuery(
     query={
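
For orientation, a minimal sketch of how this patched example might be driven end to end. Only the config.client.url override and the ArchiveQuery(query=...) call are taken from the diff above; the 'dft.code_name' filter and the iteration over the query object are illustrative assumptions, not part of the commit.

    from nomad import config
    from nomad.client import ArchiveQuery

    # Point the client at the testing deployment, as the patched example does.
    config.client.url = 'http://labdev-nomad.esc.rzg.mpg.de/fairdi/nomad/testing-major/api'

    # Hypothetical filter; any repository search quantity could go here.
    query = ArchiveQuery(query={'dft.code_name': 'VASP'})

    # Accessing results triggers the underlying API requests (assumed behaviour).
    for entry in query:
        print(entry)
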
gui/src/components/About.js
@@ -9,6 +9,26 @@ import { domains } from './domains'
 import { Grid, Card, CardContent, Typography, makeStyles, Link } from '@material-ui/core'
 import { Link as RouterLink, useHistory } from 'react-router-dom'

+export const CodeList = () => {
+  const {info} = useContext(apiContext)
+  if (!info) {
+    return '...'
+  }
+
+  return info.codes.reduce((result, code, index) => {
+    if (index !== 0) {
+      result.push(', ')
+    }
+    if (code.code_homepage) {
+      result.push(<Link target="external" key={code.code_name} href={code.code_homepage}>{code.code_name}</Link>)
+    } else {
+      result.push(code.code_name)
+    }
+    return result
+  }, [])
+}
+
 const useCardStyles = makeStyles(theme => ({
   title: {
     marginBottom: theme.spacing(1)

@@ -173,7 +193,7 @@ export default function About() {
         You can inspect the Archive form and extracted metadata before
         publishing your data.
       </p>
-      <p>NOMAD supports most community codes: {info ? info.codes.join(', ') : '...'}</p>
+      <p>NOMAD supports most community codes: <CodeList/></p>
       <p>
         To use NOMAD's parsers and normalizers outside of NOMAD.
         Read <Link href="">here</Link> on how to install

gui/src/components/DatasetPage.js
@@ -3,8 +3,8 @@ import { errorContext } from './errors'
 import { apiContext } from './api'
 import Search from './search/Search'
 import { Typography, makeStyles } from '@material-ui/core'
-import { DatasetActions, DOI } from './search/DatasetList'
-import { matchPath, useLocation, useHistory, useRouteMatch } from 'react-router'
+import { matchPath, useLocation, useRouteMatch } from 'react-router'
+import { DOI } from './search/DatasetList'

 export const help = `
 This page allows you to **inspect** and **download** NOMAD datasets. It alsow allows you

@@ -31,7 +31,6 @@ export default function DatasetPage() {
   const {raiseError} = useContext(errorContext)
   const location = useLocation()
   const match = useRouteMatch()
-  const history = useHistory()
   const {datasetId} = matchPath(location.pathname, {
     path: `${match.path}/:datasetId`

@@ -56,18 +55,11 @@ export default function DatasetPage() {
     })
   }, [location.pathname, api])

-  const handleChange = dataset => {
-    if (dataset) {
-      setDataset({dataset: dataset})
-    } else {
-      history.goBack()
-    }
-  }
-
   if (!dataset) {
     return <div>loading ...</div>
   }
-  console.log('### DatasetPage', dataset)

   return <div>
     <div className={classes.header}>
       <div className={classes.description}>

@@ -76,18 +68,11 @@ export default function DatasetPage() {
         dataset {dataset.doi ? <span>, with DOI <DOI doi={dataset.doi} /></span> : ''}
       </Typography>
     </div>
-    <div className={classes.actions}>
-      {dataset && dataset.example && <DatasetActions dataset={dataset} onChange={handleChange}/>}
-    </div>
   </div>
   <Search
     initialQuery={{owner: 'all'}}
-    query={{dataset_id: datasetId}}
+    query={{dataset_id: [datasetId]}}
+    ownerTypes={['all', 'public']}
     initialResultTab="entries"
     availableResultTabs={['entries', 'groups', 'datasets']}

gui/src/components/api.js
@@ -125,7 +125,7 @@ function handleApiError(e) {
   let error = null
   if (e.response) {
     const body = e.response.body
-    const message = (body && (body.description || body.message)) || e.response.statusText
+    const message = (body && (body.message || body.description)) || e.response.statusText
     const errorMessage = `${message} (${e.response.status})`
     if (e.response.status === 404) {
       error = new DoesNotExist(errorMessage)

gui/src/components/search/DatasetList.js
@@ -131,7 +131,7 @@ class DatasetActionsUnstyled extends React.Component {
     const canAssignDOI = !doi
     const canDelete = !doi
-    const query = {dataset_id: dataset.id}
+    const query = {dataset_id: [dataset.dataset_id]}

     return <FormGroup row classes={{root: classes.group}}>
       {search && <Tooltip title="Open a search page with entries from this dataset only.">

@@ -180,6 +180,7 @@ class DatasetListUnstyled extends React.Component {
     data: PropTypes.object,
     total: PropTypes.number,
     onChange: PropTypes.func.isRequired,
+    onEdit: PropTypes.func.isRequired,
     history: PropTypes.any.isRequired,
     datasets_after: PropTypes.string,
     per_page: PropTypes.number,

@@ -243,8 +244,8 @@ class DatasetListUnstyled extends React.Component {
   }

   renderEntryActions(entry) {
-    const {onChange} = this.props
-    return <DatasetActions search dataset={entry} onChange={() => onChange({})} />
+    const {onEdit} = this.props
+    return <DatasetActions search dataset={entry} onChange={onEdit} />
   }

   render() {

gui/src/components/search/EntryList.js
@@ -21,7 +21,7 @@ export function Published(props) {
     </Tooltip>
   } else {
     return <Tooltip title="not published yet">
-      <PrivateIcon color="secondary" />
+      <PrivateIcon color="error" />
     </Tooltip>
   }
 }

@@ -32,6 +32,7 @@ export class EntryListUnstyled extends React.Component {
     data: PropTypes.object.isRequired,
     query: PropTypes.object.isRequired,
     onChange: PropTypes.func,
+    onEdit: PropTypes.func,
     history: PropTypes.any.isRequired,
     order_by: PropTypes.string.isRequired,
     order: PropTypes.number.isRequired,

@@ -236,7 +237,7 @@ export class EntryListUnstyled extends React.Component {
         {(row.datasets || []).map(ds => (
           <Typography key={ds.dataset_id}>
             <Link component={RouterLink} to={`/dataset/id/${ds.dataset_id}`}>{ds.name}</Link>
-            {ds.doi ? <span>&nbsp;(<Link href={ds.doi}>{ds.doi}</Link>)</span> : <React.Fragment/>}
+            {ds.doi ? <span>&nbsp;(<Link href={`https://dx.doi.org/${ds.doi}`}>{ds.doi}</Link>)</span> : <React.Fragment/>}
           </Typography>
         ))}
       </div>
     </Quantity>

@@ -321,7 +322,7 @@ export class EntryListUnstyled extends React.Component {
     const createActions = (props, moreActions) => <React.Fragment>
       {example && editable
         ? <EditUserMetadataDialog
             example={example} total={selected === null ? totalNumber : selected.length}
-            onEditComplete={() => this.props.onChange()}
+            onEditComplete={() => this.props.onEdit()}
             {...props}
           /> : ''
       }
       <DownloadButton

gui/src/components/search/QuantityHistogram.js
@@ -5,7 +5,7 @@ import * as d3 from 'd3'
 import { scaleBand, scalePow } from 'd3-scale'
 import { formatQuantity, nomadPrimaryColor, nomadSecondaryColor, nomadFontFamily } from '../../config.js'
 import { searchContext } from './SearchContext.js'
-import * as searchQuantities from '../../searchQuantities.json'
+import searchQuantities from '../../searchQuantities'

 const unprocessedLabel = 'not processed'
 const unavailableLabel = 'unavailable'

gui/src/components/search/Search.js
@@ -549,13 +549,14 @@ const useScroll = (apiGroupName, afterParameterName) => {
 }

 function SearchEntryList(props) {
-  const {response, requestParameters, apiQuery} = useContext(searchContext)
+  const {response, requestParameters, apiQuery, update} = useContext(searchContext)
   const setRequestParameters = usePagination()

   return <EntryList
     query={apiQuery}
     editable={apiQuery.owner === 'staging' || apiQuery.owner === 'user'}
     data={response}
     onChange={setRequestParameters}
+    onEdit={update}
     actions={
       <React.Fragment>
         <ReRunSearchButton/>

@@ -568,9 +569,10 @@ function SearchEntryList(props) {
 }

 function SearchDatasetList(props) {
-  const {response} = useContext(searchContext)
+  const {response, update} = useContext(searchContext)

   return <DatasetList data={response}
+    onEdit={update}
     actions={<ReRunSearchButton/>}
     {...response} {...props} {...useScroll('datasets')}
   />

@@ -586,8 +588,9 @@ function SearchGroupList(props) {
 }

 function SearchUploadList(props) {
-  const {response} = useContext(searchContext)
+  const {response, update} = useContext(searchContext)

   return <UploadList data={response}
+    onEdit={update}
     actions={<ReRunSearchButton/>}
     {...response} {...props} {...useScroll('uploads')}
   />

gui/src/components/search/SearchContext.js
@@ -150,7 +150,7 @@ export default function SearchContext({initialRequest, initialQuery, query, chil
     owner: owner,
     ...initialQuery,
     ...requestRef.current.query,
-    query
+    ...query
   }
   if (dateHistogram) {
     dateHistogramInterval = Dates.intervalSeconds(

gui/src/components/search/UploadsList.js
@@ -53,7 +53,7 @@ class UploadActionsUnstyled extends React.Component {
     classes: PropTypes.object.isRequired,
     upload: PropTypes.object.isRequired,
     user: PropTypes.object,
-    onChange: PropTypes.func,
+    onEdit: PropTypes.func,
     history: PropTypes.object.isRequired
   }

@@ -71,9 +71,9 @@ class UploadActionsUnstyled extends React.Component {
   }

   handleEdit() {
-    const {onChange, upload} = this.props
-    if (onChange) {
-      onChange(upload)
+    const {onEdit, upload} = this.props
+    if (onEdit) {
+      onEdit(upload)
     }
   }

@@ -86,7 +86,7 @@ class UploadActionsUnstyled extends React.Component {
     const editable = user && upload.example &&
       upload.example.authors.find(author => author.user_id === user.sub)

-    const query = {upload_id: upload.example.upload_id}
+    const query = {upload_id: [upload.example.upload_id]}

     return <FormGroup row classes={{root: classes.group}}>
       <Tooltip title="Open this upload on the uploads page">

@@ -112,6 +112,7 @@ class UploadListUnstyled extends React.Component {
     data: PropTypes.object,
     total: PropTypes.number,
     onChange: PropTypes.func.isRequired,
+    onEdit: PropTypes.func.isRequired,
     history: PropTypes.any.isRequired,
     uploads_after: PropTypes.string,
     actions: PropTypes.element

@@ -176,8 +177,8 @@ class UploadListUnstyled extends React.Component {
   }

   renderEntryActions(entry) {
-    const {onChange} = this.props
-    return <UploadActions search upload={entry} onChange={() => onChange({})} />
+    const {onEdit} = this.props
+    return <UploadActions search upload={entry} onEdit={onEdit} />
   }

   render() {

gui/src/components/uploads/Upload.js
@@ -171,7 +171,7 @@ class Upload extends React.Component {
     cursor: 'pointer'
   },
   decideIcon: {
-    color: theme.palette.secondary.main
+    color: theme.palette.error.main
   }
 })

@@ -605,12 +605,13 @@ class Upload extends React.Component {
     return <EntryList
       title={`Upload with ${data.pagination.total} detected entries`}
-      query={{upload_id: upload.upload_id}}
+      query={{upload_id: [upload.upload_id]}}
       columns={columns}
       selectedColumns={Upload.defaultSelectedColumns}
       editable={tasks_status === 'SUCCESS'}
       data={data}
       onChange={this.handleChange}
+      onEdit={this.handleChange}
       actions={actions}
       showEntryActions={entry => entry.processed || !running}
       {...this.state.params}

@@ -634,7 +635,7 @@ class Upload extends React.Component {
     } else if (upload.published) {
       return render(
         <PublishedIcon size={32} color="primary" />, 'This upload is published')
     } else {
-      return render(
-        <UnPublishedIcon size={32} color="secondary" />, 'This upload is not published yet, and only visible to you')
+      return render(
+        <UnPublishedIcon size={32} color="error" />, 'This upload is not published yet, and only visible to you')
     }
   }

gui/src/components/uploads/UploadPage.js
 import React from 'react'
 import PropTypes, { instanceOf } from 'prop-types'
 import Markdown from '../Markdown'
-import { withStyles, Paper, IconButton, FormGroup, FormLabel, Tooltip } from '@material-ui/core'
+import { withStyles, Paper, IconButton, FormGroup, FormLabel, Tooltip, Typography } from '@material-ui/core'
 import UploadIcon from '@material-ui/icons/CloudUpload'
 import Dropzone from 'react-dropzone'
 import Upload from './Upload'

@@ -16,6 +16,7 @@ import Pagination from 'material-ui-flat-pagination'
 import { CopyToClipboard } from 'react-copy-to-clipboard'
 import { guiBase } from '../../config'
 import qs from 'qs'
+import { CodeList } from '../About'

 export const help = `
 NOMAD allows you to upload data. After upload, NOMAD will process your data: it will

@@ -126,7 +127,8 @@ class UploadPage extends React.Component {
       '& svg': {
         marginLeft: 'auto',
         marginRight: 'auto'
       }
     },
+    marginTop: theme.spacing(3)
   },
   dropzoneAccept: {
     background: theme.palette.primary.main,

@@ -267,6 +269,16 @@ class UploadPage extends React.Component {
     return (
       <div className={classes.root}>
+        <Typography>
+          To prepare your data, simply use <b>zip</b> or <b>tar</b> to create a single file that contains
+          all your files as they are. These .zip/.tar files can contain subdirectories and additional files.
+          NOMAD will search through all files and identify the relevant files automatically.
+          Each uploaded file can be <b>up to 32 GB</b> in size, you can have <b>up to 10 unpublished uploads</b>
+          simultaneously. Your uploaded data is not published right away.
+        </Typography>
+        <Typography>
+          The following codes are supported: <CodeList/>.
+        </Typography>
         <Paper className={classes.dropzoneContainer}>
           <Dropzone
             accept={[

@@ -288,7 +300,7 @@ class UploadPage extends React.Component {
             rejectClassName={classes.dropzoneReject}
             onDrop={this.onDrop.bind(this)}
           >
-            <p>drop .tar.gz or .zip files here</p>
+            <p>click or drop .tar.gz/.zip files here</p>
             <UploadIcon style={{fontSize: 36}}/>
           </Dropzone>
         </Paper>

gui/src/searchQuantities.json
@@ -234,7 +234,7 @@
         "name": "dft.code_name",
         "description": "The name of the used code.",
         "many": false,
-        "statistic_size": 36,
+        "statistic_size": 34,
         "statistic_values": [
             "ABINIT",
             "ATK",

@@ -246,30 +246,28 @@
             "Crystal",
             "DL_POLY",
             "DMol3",
-            "elastic",
-            "elk",
-            "exciting",
             "FHI-aims",
-            "fleur",
             "GAMESS",
-            "GPAW",
-            "GPAW",
             "Gaussian",
+            "GPAW",
-            "gulp",
-            "libAtoms",
             "MOLCAS",
             "NWChem",
-            "Octopus",
             "ONETEP",
             "ORCA",
+            "Octopus",
             "Phonopy",
-            "qbox",
             "Quantum Espresso",
             "Siesta",
-            "VASP",
-            "turbomole",
             "VASP",
             "WIEN2k",
+            "elastic",
+            "elk",
+            "exciting",
+            "fleur",
+            "gulp",
+            "libAtoms",
+            "qbox",
+            "turbomole",
             "unavailable",
             "not processed"
         ]

nomad/app/api/archive.py
@@ -233,19 +233,21 @@ class ArchiveQueryResource(Resource):
         See ``/repo`` endpoint for documentation on the search
         parameters.

         This endpoint uses pagination (see /repo) or id aggregation to handle large result
         sets over multiple requests.
+        Use aggregation.after and aggregation.per_page to request a
+        certain page with id aggregation.

         The actual data are in results and a supplementary python code (curl) to
         execute search is in python (curl).
         '''
         try:
             data_in = request.get_json()
-            scroll = data_in.get('scroll', None)
-            if scroll:
-                scroll_id = scroll.get('scroll_id')
-                scroll = True
+            aggregation = data_in.get('aggregation', None)
             pagination = data_in.get('pagination', {})
             page = pagination.get('page', 1)
-            per_page = pagination.get('per_page', 10 if not scroll else 1000)
+            per_page = pagination.get('per_page', 10)

             query = data_in.get('query', {})

@@ -270,20 +272,19 @@ class ArchiveQueryResource(Resource):
         search_request.owner('all')
         apply_search_parameters(search_request, query)
-        search_request.include(
-            'calc_id', 'upload_id', 'with_embargo', 'published', 'parser_name')
+        if not aggregation:
+            search_request.include(
+                'calc_id', 'upload_id', 'with_embargo', 'published', 'parser_name')

         try:
-            if scroll:
-                results = search_request.execute_scrolled(
-                    scroll_id=scroll_id, size=per_page, order_by='upload_id')
-                results['scroll']['scroll'] = True
+            if aggregation:
+                results = search_request.execute_aggregated(
+                    after=aggregation.get('after'), per_page=aggregation.get('per_page', 1000),
+                    includes=['with_embargo', 'published', 'parser_name'])
             else:
                 results = search_request.execute_paginated(
                     per_page=per_page, page=page, order_by='upload_id')
-        except search.ScrollIdNotFound:
-            abort(400, 'The given scroll_id does not exist.')
         except KeyError as e:
             abort(400, str(e))

@@ -334,7 +335,7 @@ class ArchiveQueryResource(Resource):
         except Exception as e:
             if raise_errors:
                 raise e
-            common.logger(str(e), exc_info=e)
+            common.logger.error(str(e), upload_id=upload_id, calc_id=calc_id, exc_info=e)

         if upload_files is not None:
             upload_files.close()
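
To make the id-aggregation paging described in the docstring concrete, a hedged client-side sketch follows. The /archive/query path, the example filter, and the exact response layout (aggregation.after, the results list) are assumptions read off this diff, not verified against the full API.

    import requests

    base = 'http://labdev-nomad.esc.rzg.mpg.de/fairdi/nomad/testing-major/api'
    body = {
        'query': {'dft.code_name': 'VASP'},   # illustrative filter
        'aggregation': {'per_page': 1000}     # first request carries no 'after' key
    }

    while True:
        data = requests.post(base + '/archive/query', json=body).json()
        for result in data.get('results', []):
            pass                              # consume each archive entry here
        after = data.get('aggregation', {}).get('after')
        if after is None:
            break                             # no further id pages
        body['aggregation']['after'] = after  # ask for the next page of ids
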
nomad/app/api/common.py
@@ -65,10 +65,18 @@ scroll_model = api.model('Scroll', {
     'total': fields.Integer(default=0, description='The total amount of hits for the search.'),
     'scroll_id': fields.String(default=None, allow_null=True, description='The scroll_id that can be used to retrieve the next page.'),
     'size': fields.Integer(default=0, help='The size of the returned scroll page.')})
 ''' Model used in responses with scroll. '''

+aggregation_model = api.model('Aggregation', {
+    'after': fields.String(description='The after key for the current request.', allow_null=True),
+    'total': fields.Integer(default=0, description='The total amount of hits for the search.'),
+    'per_page': fields.Integer(default=0, help='The size of the requested page.', allow_null=True)})
+''' Model used in responses with id aggregation. '''

 search_model_fields = {
     'pagination': fields.Nested(pagination_model, allow_null=True, skip_none=True),
     'scroll': fields.Nested(scroll_model, allow_null=True, skip_none=True),
+    'aggregation': fields.Nested(aggregation_model, allow_null=True),
     'results': fields.List(fields.Raw(allow_null=True, skip_none=True), description=(
         'A list of search results. Each result is a dict with quantitie names as key and '
         'values as values'), allow_null=True, skip_none=True),

nomad/app/api/info.py
@@ -54,10 +54,15 @@ statistics_info_model = api.model('StatisticsInfo', {
     # 'archive_file_size': fields.Integer(description='Total amount of binary archive data in TB')
 })

+code_info_model = api.model('CodeInfo', {
+    'code_name': fields.String(description='Name of the code or input format', allow_null=True),
+    'code_homepage': fields.String(description='Homepage of the code or input format', allow_null=True)
+}, allow_null=True, skip_none=True)

 info_model = api.model('Info', {
     'parsers': fields.List(fields.String),
     'metainfo_packages': fields.List(fields.String),
-    'codes': fields.List(fields.String),
+    'codes': fields.List(fields.Nested(code_info_model)),
     'normalizers': fields.List(fields.String),
     'domains': fields.List(fields.Nested(model=domain_model)),
     'statistics': fields.Nested(model=statistics_info_model, description='General NOMAD statistics'),

@@ -88,10 +93,14 @@ class InfoResource(Resource):
     @api.marshal_with(info_model, skip_none=True, code=200, description='Info send')
     def get(self):
         ''' Return information about the nomad backend and its configuration. '''
-        codes = [
-            parser.code_name for parser in parsing.parser_dict.values()
-            if isinstance(parser, parsing.MatchingParser) and parser.domain == 'dft']
+        codes_dict = {}
+        for parser in parsing.parser_dict.values():
+            if isinstance(parser, parsing.MatchingParser) and parser.domain == 'dft':
+                code_name = parser.code_name
+                if code_name in codes_dict:
+                    continue
+                codes_dict[code_name] = dict(code_name=code_name, code_homepage=parser.code_homepage)
+        codes = sorted(list(codes_dict.values()), key=lambda code_info: code_info['code_name'].lower())

         return {
             'parsers': [

@@ -100,7 +109,7 @@ class InfoResource(Resource):
             'metainfo_packages': ['general', 'general.experimental', 'common', 'public'] + sorted([
                 key[key.index('/') + 1:] for key in parsing.parser_dict.keys()]),
-            'codes': sorted(set(codes), key=lambda x: x.lower()),
+            'codes': codes,
             'normalizers': [normalizer.__name__ for normalizer in normalizing.normalizers],
             'statistics': statistics(),
             'domains': [
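
With this change the info endpoint's codes entry becomes a list of objects carrying code_name and code_homepage, which is what the new CodeList component in About.js consumes. A hedged sketch of reading the new shape follows; the /info path and the printed format are assumptions.

    import requests

    info = requests.get('http://labdev-nomad.esc.rzg.mpg.de/fairdi/nomad/testing-major/api/info').json()
    for code in info.get('codes', []):
        # code_homepage may be absent because the model is marshalled with skip_none=True
        name, homepage = code.get('code_name'), code.get('code_homepage')
        print(name if homepage is None else '%s (%s)' % (name, homepage))
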
nomad/app/api/repo.py
@@ -554,7 +554,7 @@ class EditRepoCalcsResource(Resource):
             # remove potentially empty old datasets
             if removed_datasets is not None:
                 for dataset in removed_datasets:
-                    if proc.Calc.objects(metadata__dataset_id=dataset).first() is None:
+                    if proc.Calc.objects(metadata__datasets=dataset).first() is None:
                         Dataset.m_def.a_mongo.objects(dataset_id=dataset).delete()

         return json_data, 200

nomad/cli/admin/uploads.py
@@ -193,7 +193,7 @@ def chown(ctx, username, uploads):
     search.refresh()

-@uploads.command(help='Change the owner of the upload and all its calcs.')
+@uploads.command(help='Reset the processing state.')
 @click.argument('UPLOADS', nargs=-1)
 @click.option('--with-calcs', is_flag=True, help='Also reset all calculations.')
 @click.pass_context

@@ -209,6 +209,7 @@ def reset(ctx, uploads, with_calcs):
                 dict(upload_id=upload.upload_id),
                 {'$set': proc.Calc.reset_pymongo_update()})

         upload.process_status = None
+        upload.reset()
         upload.save()

         i += 1

nomad/client.py
@@ -98,6 +98,10 @@ sub-sections return lists of further objects. Here we navigate the sections ``se
 sub-section ``section_system`` to access the quantity ``energy_total``. This quantity is a
 number with an attached unit (Joule), which can be converted to something else (e.g. Hartree).

+The create query object keeps all results in memory. Keep this in mind, when you are
+accessing a large amount of query results. You should use :func:`ArchiveQuery.clear`
+to remove unnecessary results.
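
A hedged sketch of the memory advice added above. It assumes the query object supports indexing and that ArchiveQuery.clear() discards results that have already been fetched; check nomad/client.py for the actual signatures.

    from nomad.client import ArchiveQuery

    query = ArchiveQuery(query={'dft.code_name': 'VASP'})  # illustrative filter

    first_batch = [query[i] for i in range(10)]  # assumed: indexing triggers the API requests
    # ... work with the downloaded archives ...
    query.clear()                                # then drop them to free memory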