nomad-lab / nomad-FAIR · Commits

Commit 620c6d4b, authored Jul 24, 2020 by Markus Scheidgen

Merge branch 'encyclopedia-api' into 'v0.8.4'

Encyclopedia merge. See merge request !137

Parents: 36e681e3, 3a21531a
Pipeline #79345 passed with stages in 20 minutes and 1 second
Changes: 233 · Pipelines: 1
encyclopedia-gui @ 15cb97c9 (compare 62716704...15cb97c9)
-Subproject commit 62716704fcc6f74d5820311bffa7e7ebb0bf38c2
+Subproject commit 15cb97c9cf61fd59309a50f6700b99733fc6a659
vasp @ 8776a0bc (compare 1a0c8cb0...8776a0bc)
-Subproject commit 1a0c8cb0801375a78134c67f7a6d31319a338503
+Subproject commit 8776a0bc7b32fb51e98ea8fe7af7d5630240edd3
wien2k @ d58ac9b6 (compare 16f7a7f6...d58ac9b6)
-Subproject commit 16f7a7f6909dbe16908d1be2e1fa03d3bddd17b5
+Subproject commit d58ac9b61ba2c9e82f50a30c2c234778743d51ad
nomad/app/api/encyclopedia.py
...
@@ -24,12 +24,12 @@ from flask import request
 from elasticsearch_dsl import Search, Q, A
 from elasticsearch_dsl.utils import AttrDict
-from nomad import config, files
+from nomad import config, files, infrastructure
 from nomad.units import ureg
 from nomad.atomutils import get_hill_decomposition
 from nomad.datamodel.datamodel import EntryArchive
 from .api import api
-from .common import enable_gzip
+from .auth import authenticate

 ns = api.namespace("encyclopedia", description="Access encyclopedia metadata.")
 re_formula = re.compile(r"([A-Z][a-z]?)(\d*)")
...
@@ -240,14 +240,65 @@ class EncMaterialsResource(Resource):
         except Exception as e:
             abort(400, message=str(e))

+        # The queries that correspond to AND queries typically need to access
+        # multiple calculations at once to find the material ids that
+        # correspond to the query. To implement this behaviour we need to run
+        # an initial aggregation that checks that the requested properties are
+        # present for a material. This is a very crude solution that does not
+        # scale to complex queries, but I'm not sure we can do much better
+        # until we have a separate index for materials.
+        property_map = {
+            "has_thermal_properties": "encyclopedia.properties.thermodynamical_properties",
+            "has_band_structure": "encyclopedia.properties.electronic_band_structure",
+            "has_dos": "encyclopedia.properties.electronic_dos",
+            "has_fermi_surface": "encyclopedia.properties.fermi_surface",
+        }
+        requested_properties = []
+        # The size is set very large because all the results need to be
+        # returned. We cannot get the results in a paginated way with composite
+        # aggregation, because pipeline aggregations are not compatible with
+        # them.
+        agg_parent = A("terms", field="encyclopedia.material.material_id", size=5000000)
+        for key, value in property_map.items():
+            if data[key] is True:
+                agg = A("filter", exists={"field": value})
+                agg_parent.bucket(key, agg)
+                requested_properties.append(key)
+        if len(requested_properties) > 1:
+            bool_query = Q(
+                "bool",
+                filter=get_enc_filter(),
+            )
+            s = Search(index=config.elastic.index_name)
+            s = s.query(bool_query)
+            s.aggs.bucket("materials", agg_parent)
+            buckets_path = {x: "{}._count".format(x) for x in requested_properties}
+            script = " && ".join(["params.{} > 0".format(x) for x in requested_properties])
+            agg_parent.pipeline("selector", A(
+                "bucket_selector",
+                buckets_path=buckets_path,
+                script=script,
+            ))
+            s = s.extra(**{
+                "size": 0,
+            })
+            response = s.execute()
+            material_ids = [x["key"] for x in response.aggs.materials.buckets]
+            if len(material_ids) == 0:
+                abort(404, message="No materials found for the given search criteria or pagination.")
+
+        # After finding the material ids that fill the AND conditions, continue
+        # with a simple OR query.
         filters = get_enc_filter()
         must_nots = []
         musts = []

         def add_terms_filter(source, target, query_type="terms"):
-            if data[source]:
+            if data[source] is not None:
                 filters.append(Q(query_type, **{target: data[source]}))

+        if len(requested_properties) > 1:
+            filters.append(Q("terms", encyclopedia__material__material_id=material_ids))
         add_terms_filter("material_name", "encyclopedia.material.material_name")
         add_terms_filter("structure_type", "encyclopedia.material.bulk.structure_type")
         add_terms_filter("space_group_number", "encyclopedia.material.bulk.space_group_number")
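For context, a standalone sketch of the AND-filter aggregation introduced above. The field paths and the terms/filter/bucket_selector pattern follow the diff; the Elasticsearch client, the index name, and the example property selection are assumptions.

from elasticsearch import Elasticsearch
from elasticsearch_dsl import A, Q, Search

def find_materials_with_all_properties(client, index, property_fields):
    # Bucket entries by material id and count, per requested property, how many
    # entries in that bucket contain the property.
    agg = A("terms", field="encyclopedia.material.material_id", size=5000000)
    for name, field in property_fields.items():
        agg.bucket(name, A("filter", Q("exists", field=field)))
    # Keep only buckets in which every per-property counter is non-zero,
    # i.e. materials that have all of the requested properties.
    agg.pipeline("selector", A(
        "bucket_selector",
        buckets_path={name: "{}._count".format(name) for name in property_fields},
        script=" && ".join("params.{} > 0".format(name) for name in property_fields),
    ))
    s = Search(using=client, index=index).extra(size=0)
    s.aggs.bucket("materials", agg)
    response = s.execute()
    return [bucket["key"] for bucket in response.aggs.materials.buckets]

# Hypothetical usage with two of the property paths from the diff.
client = Elasticsearch()
material_ids = find_materials_with_all_properties(client, "nomad_entries_index", {
    "has_dos": "encyclopedia.properties.electronic_dos",
    "has_band_structure": "encyclopedia.properties.electronic_band_structure",
})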
...
@@ -258,7 +309,8 @@ class EncMaterialsResource(Resource):
         add_terms_filter("basis_set_type", "dft.basis_set")
         add_terms_filter("code_name", "dft.code_name")

-        # Add exists filters
+        # Add exists filters if only one property was requested. The initial
+        # aggregation will handle multiple simultaneous properties.
         def add_exists_filter(source, target):
             param = data[source]
             if param is not None:
...
@@ -267,11 +319,9 @@ class EncMaterialsResource(Resource):
                 filters.append(query)
             elif param is False:
                 must_nots.append(query)

-        add_exists_filter("has_thermal_properties", "encyclopedia.properties.thermodynamical_properties")
-        add_exists_filter("has_band_structure", "encyclopedia.properties.electronic_band_structure")
-        add_exists_filter("has_dos", "encyclopedia.properties.electronic_dos")
-        add_exists_filter("has_fermi_surface", "encyclopedia.properties.fermi_surface")
+        if len(requested_properties) == 1:
+            prop_name = requested_properties[0]
+            add_exists_filter(prop_name, property_map[prop_name])

         # Add range filters
         def add_range_filter(source, target, source_unit=None, target_unit=None):
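The source_unit/target_unit parameters of add_range_filter suggest a unit conversion step before the value enters the Elasticsearch range query. A minimal sketch of that idea with pint; the field path and unit choice are illustrative assumptions, not the helper's actual body.

import pint

ureg = pint.UnitRegistry()

# Convert a band-gap lower bound given in eV to SI units before querying.
band_gap_min_ev = 1.0
band_gap_min_si = (band_gap_min_ev * ureg.eV).to(ureg.joule).magnitude
range_query = {"range": {"encyclopedia.properties.band_gap": {"gte": band_gap_min_si}}}
print(range_query)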
...
@@ -430,8 +480,8 @@ class EncMaterialsResource(Resource):
             pages["total"] = n_materials
         # 2. Collapse approach. Quickly provides a list of materials
-        # corresponding to the query, offers full pagination, doesn't include
-        # the number of matches per material.
+        # corresponding to the query, offers full pagination, the number of
+        # matches per material needs to be requested with a separate query.
         elif mode == "collapse":
             s = Search(index=config.elastic.index_name)
             s = s.query(bool_query)
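The "collapse" mode described in the comment maps to Elasticsearch field collapsing: one top hit is returned per material and regular from/size pagination applies, but per-material hit counts must be fetched separately. A hedged sketch; the client, index name, and page size are assumptions, the collapse field comes from the diff.

from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search

client = Elasticsearch()
s = Search(using=client, index="nomad_entries_index")
s = s.extra(collapse={"field": "encyclopedia.material.material_id"})
s = s[0:25]  # regular pagination still works on collapsed results
response = s.execute()
material_ids = [hit.encyclopedia.material.material_id for hit in response]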
...
@@ -741,7 +791,6 @@ calculations_result = api.model("calculations_result", {
 @ns.route("/materials/<string:material_id>/calculations")
 class EncCalculationsResource(Resource):
-    @enable_gzip()
     @api.response(404, "Suggestion not found")
     @api.response(400, "Bad request")
     @api.response(200, "Metadata send", fields.Raw)
...
@@ -780,7 +829,9 @@ class EncCalculationsResource(Resource):
         def calc_score(entry):
             """Custom scoring function used to sort results by their
             "quality". Currently built to mimic the scoring that was used
-            in the old Encyclopedia GUI.
+            in the old Encyclopedia GUI. Primarily sorts by quality measure,
+            ties are broken by alphabetic sorting of entry_id in order to
+            return consistent results.
             """
             score = 0
             functional_score = {
...
@@ -800,7 +851,7 @@ class EncCalculationsResource(Resource):
             if has_dos and has_bs:
                 score += 10
-            return score
+            return (score, entry["calc_id"])

         # The calculations are first sorted by "quality"
         sorted_calc = sorted(response, key=lambda x: calc_score(x), reverse=True)
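Returning a (score, calc_id) tuple makes the ordering deterministic: entries with equal scores are ranked by their id instead of by their arbitrary position in the response. A minimal illustration with hypothetical entries:

def calc_score(entry):
    # Primary key is the quality score; ties are broken by the entry id.
    return (entry["score"], entry["calc_id"])

entries = [
    {"calc_id": "b2", "score": 10},
    {"calc_id": "a1", "score": 10},
    {"calc_id": "c3", "score": 7},
]
sorted_calc = sorted(entries, key=calc_score, reverse=True)
print([e["calc_id"] for e in sorted_calc])  # ['b2', 'a1', 'c3'] on every run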
...
@@ -1081,7 +1132,6 @@ calculation_property_result = api.model("calculation_property_result", {
 @ns.route("/materials/<string:material_id>/calculations/<string:calc_id>")
 class EncCalculationResource(Resource):
-    @enable_gzip()
     @api.response(404, "Material or calculation not found")
     @api.response(400, "Bad request")
     @api.response(200, "Metadata send", fields.Raw)
...
@@ -1175,15 +1225,20 @@ class EncCalculationResource(Resource):
             for key, arch_path in arch_properties.items():
                 value = root[arch_path]

-                # Save derived properties and turn into dict
+                # Replace unnormalized thermodynamical properties with
+                # normalized ones and turn into dict
                 if key == "thermodynamical_properties":
                     specific_heat_capacity = value.specific_heat_capacity.magnitude.tolist()
                     specific_free_energy = value.specific_vibrational_free_energy_at_constant_volume.magnitude.tolist()
+                    specific_heat_capacity = [x if np.isfinite(x) else None for x in specific_heat_capacity]
+                    specific_free_energy = [x if np.isfinite(x) else None for x in specific_free_energy]

                 if isinstance(value, list):
                     value = [x.m_to_dict() for x in value]
                 else:
                     value = value.m_to_dict()

                 if key == "thermodynamical_properties":
+                    del value["thermodynamical_property_heat_capacity_C_v"]
+                    del value["vibrational_free_energy_at_constant_volume"]
                     value["specific_heat_capacity"] = specific_heat_capacity
                     value["specific_vibrational_free_energy_at_constant_volume"] = specific_free_energy
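The new list comprehensions most likely guard the JSON response: JSON has no encoding for NaN or Infinity, so non-finite values are replaced with None (serialized as null). A minimal sketch of the same step:

import json

import numpy as np

values = np.array([1.5, np.nan, np.inf, 2.0]).tolist()
cleaned = [x if np.isfinite(x) else None for x in values]
print(json.dumps(cleaned))  # [1.5, null, null, 2.0]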
...
@@ -1226,6 +1281,63 @@ class EncCalculationResource(Resource):
         return result, 200

+report_query = api.model("report_query", {
+    "server": fields.String,
+    "username": fields.String,
+    "email": fields.String,
+    "first_name": fields.String,
+    "last_name": fields.String,
+    "category": fields.String,
+    "subcategory": fields.String(allow_null=True),
+    "representatives": fields.Raw(Raw=True),
+    "message": fields.String,
+})
+
+
+@ns.route("/materials/<string:material_id>/reports")
+class ReportsResource(Resource):
+    @api.response(500, "Error sending report")
+    @api.response(400, "Bad request")
+    @api.response(204, "Report succesfully sent", fields.Raw)
+    @api.expect(calculation_property_query, validate=False)
+    @api.marshal_with(calculation_property_result, skip_none=True)
+    @api.doc("enc_report")
+    @authenticate(required=True)
+    def post(self, material_id):
+        # Get query parameters as json
+        try:
+            query = marshal(request.get_json(), report_query)
+        except Exception as e:
+            abort(400, message=str(e))
+
+        # Send the report as an email
+        query["material_id"] = material_id
+        representatives = query["representatives"]
+        if representatives is not None:
+            representatives = "\n" + "\n".join([" {}: {}".format(key, value) for key, value in representatives.items()])
+            query["representatives"] = representatives
+        mail = (
+            "Server: {server}\n\n"
+            "Username: {username}\n"
+            "First name: {first_name}\n"
+            "Last name: {last_name}\n"
+            "Email: {email}\n\n"
+            "Material id: {material_id}\n"
+            "Category: {category}\n"
+            "Subcategory: {subcategory}\n"
+            "Representative calculations: {representatives}\n\n"
+            "Message: {message}"
+        ).format(**query)
+        try:
+            infrastructure.send_mail(
+                name="webmaster", email="lauri.himanen@gmail.com", message=mail, subject='Encyclopedia error report')
+        except Exception as e:
+            abort(500, message="Error sending error report email.")
+        print(mail)
+        return "", 204


 def read_archive(upload_id: str, calc_id: str) -> EntryArchive:
     """Used to read data from the archive.
...
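A hedged sketch of exercising the new report endpoint from a client. The route and the payload fields follow the diff; the API base URL, the authentication scheme, and all values are placeholders that depend on the deployment.

import requests

base_url = "https://nomad.example.org/api"  # hypothetical API prefix
token = "..."                               # hypothetical access token

payload = {
    "server": "https://nomad.example.org",
    "username": "jdoe",
    "email": "jdoe@example.org",
    "first_name": "Jane",
    "last_name": "Doe",
    "category": "Incorrect data",
    "subcategory": None,
    "representatives": {"electronic_band_structure": "some_calc_id"},
    "message": "The band structure for this material looks wrong.",
}
response = requests.post(
    "{}/encyclopedia/materials/{}/reports".format(base_url, "some_material_id"),
    json=payload,
    headers={"Authorization": "Bearer {}".format(token)},
)
print(response.status_code)  # 204 when the report was sent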
nomad/atomutils.py
...
@@ -75,7 +75,7 @@ def find_match(pos: np.array, positions: np.array, eps: float) -> Union[int, None]:
     return None


-def get_symmetry_string(space_group: int, wyckoff_sets: List[WyckoffSet]) -> str:
+def get_symmetry_string(space_group: int, wyckoff_sets: List[WyckoffSet], is_2d: bool = False) -> str:
     """Used to serialize symmetry information into a string. The Wyckoff
     positions are assumed to be normalized and ordered as is the case if using
     the matid-library.
...
@@ -84,6 +84,9 @@ def get_symmetry_string(space_group: int, wyckoff_sets: List[WyckoffSet]) -> str:
         space_group: 3D space group number
         wyckoff_sets: Wyckoff sets that map a Wyckoff letter to related
             information
+        is_2d: Whether the symmetry information is analyzed from a 2D
+            structure. If true, a prefix is added to the string to distinguish
+            2D from 3D.

     Returns:
         A string that encodes the symmetry properties of an atomistic
...
@@ -97,6 +100,9 @@ def get_symmetry_string(space_group: int, wyckoff_sets: List[WyckoffSet]) -> str:
         i_string = "{} {} {}".format(element, wyckoff_letter, n_atoms)
         wyckoff_strings.append(i_string)
     wyckoff_string = ", ".join(sorted(wyckoff_strings))
-    string = "{} {}".format(space_group, wyckoff_string)
+    if is_2d:
+        string = "2D {} {}".format(space_group, wyckoff_string)
+    else:
+        string = "{} {}".format(space_group, wyckoff_string)

     return string
...
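The added is_2d flag only changes the serialized string, but because the material id is derived from this string, the prefix keeps 2D materials from colliding with 3D materials that share the same space group and Wyckoff occupation. A simplified sketch of the format (the space group number and the Wyckoff entry are made up):

def symmetry_string(space_group, wyckoff_parts, is_2d=False):
    # Each part is "element wyckoff_letter n_atoms", sorted for a stable order.
    wyckoff_string = ", ".join(sorted(wyckoff_parts))
    prefix = "2D " if is_2d else ""
    return "{}{} {}".format(prefix, space_group, wyckoff_string)

parts = ["C a 2"]
print(symmetry_string(191, parts))              # "191 C a 2"
print(symmetry_string(191, parts, is_2d=True))  # "2D 191 C a 2"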
nomad/config.py
...
@@ -233,7 +233,7 @@ normalize = NomadConfig(
     max_2d_single_cell_size=7,
     # The distance tolerance between atoms for grouping them into the same
     # cluster. Used in detecting system type.
-    cluster_threshold=3.1,
+    cluster_threshold=2.5,
     # Defines the "bin size" for rounding cell angles for the material hash
     angle_rounding=float(10.0),  # unit: degree
     # The threshold for a system to be considered "flat". Used e.g. when
...
nomad/datamodel/metainfo/public.py
...
@@ -1290,14 +1290,6 @@ class section_dos(MSection):
         ''',
         a_legacy=LegacyDefinition(name='dos_energies'))

-    dos_fermi_energy = Quantity(
-        type=np.dtype(np.float64),
-        shape=[],
-        description='''
-        Stores the Fermi energy of the density of states.
-        ''',
-        a_legacy=LegacyDefinition(name='dos_fermi_energy'))
-
     dos_integrated_values = Quantity(
         type=np.dtype(np.float64),
         shape=['number_of_spin_channels', 'number_of_dos_values'],
...
nomad/infrastructure.py
...
@@ -433,6 +433,14 @@ def reset(remove: bool):
 def send_mail(name: str, email: str, message: str, subject: str):
+    """Used to programmatically send mails.
+
+    Args:
+        name: The email recipient name.
+        email: The email recipient address.
+        message: The email body.
+        subject: The subject line.
+    """
     if not config.mail.enabled:
         return
...
@@ -453,7 +461,6 @@ def send_mail(name: str, email: str, message: str, subject: str):
     msg = MIMEText(message)
     msg['Subject'] = subject
-    msg['From'] = 'The nomad team <%s>' % config.mail.from_address
     msg['To'] = name
     to_addrs = [email]
...
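A small usage sketch of the helper documented above. The recipient values are placeholders, and the call returns without sending anything unless config.mail.enabled is set in the deployment configuration.

from nomad import infrastructure

infrastructure.send_mail(
    name="webmaster",
    email="admin@example.org",
    message="Test message body.",
    subject="Test subject",
)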
nomad/normalizing/encyclopedia/encyclopedia.py
...
@@ -207,6 +207,7 @@ class EncyclopediaNormalizer(Normalizer):
         """
         sec_enc = self.backend.entry_archive.section_metadata.m_create(EncyclopediaMetadata)
         status_enums = EncyclopediaMetadata.status.type
+        calc_enums = Calculation.calculation_type.type

         # Do nothing if section_run is not present
         if self.section_run is None:
...
@@ -251,16 +252,18 @@ class EncyclopediaNormalizer(Normalizer):
             )
             return

-        # Get the method type. For now, we allow unknown method type to
-        # allow phonon calculations through.
+        # Get the method type. For now, we allow unknown method type for
+        # phonon calculations, as the method information is resolved at a
+        # later stage.
         representative_method, method_type = self.method_type(method)
         if method_type == config.services.unavailable_value:
-            status = status_enums.unsupported_method_type
-            sec_enc.status = status
+            sec_enc.status = status_enums.unsupported_method_type
             self.logger.info(
                 "unsupported method type for encyclopedia",
-                enc_status=status,
+                enc_status=status_enums.unsupported_method_type,
             )
+            if calc_type != calc_enums.phonon_calculation:
+                return

         # Get representative scc
         try:
...
@@ -284,6 +287,16 @@ class EncyclopediaNormalizer(Normalizer):
             # Put the encyclopedia section into backend
             self.fill(context)

+            # Check that the necessary information is in place
+            functional_type = method.functional_type
+            if functional_type is None:
+                sec_enc.status = status_enums.unsupported_method_type
+                self.logger.info(
+                    "unsupported functional type for encyclopedia",
+                    enc_status=status_enums.unsupported_method_type,
+                )
+                return
         except Exception:
             status = status_enums.failure
             sec_enc.status = status
...
nomad/normalizing/encyclopedia/material.py
...
@@ -422,6 +422,14 @@ class MaterialBulkNormalizer(MaterialNormalizer):
 class Material2DNormalizer(MaterialNormalizer):
     """Processes structure related metainfo for Encyclopedia 2D structures.
     """
+    def material_id(self, material: Material, spg_number: int, wyckoff_sets: List[WyckoffSet]) -> None:
+        # The hash is based on the symmetry analysis of the structure when it
+        # is treated as a 3D structure. Due to this the hash may overlap with
+        # real 3D structures unless we include a distinguishing label for 2D
+        # structures in the hash seed.
+        norm_hash_string = atomutils.get_symmetry_string(spg_number, wyckoff_sets, is_2d=True)
+        material.material_id = hash(norm_hash_string)
+
     def lattice_vectors(self, ideal: IdealizedStructure, std_atoms: Atoms) -> None:
         cell_normalized = std_atoms.get_cell()
         cell_normalized *= 1e-10
...
@@ -514,6 +522,8 @@ class Material2DNormalizer(MaterialNormalizer):
         self.lattice_vectors_primitive(ideal, prim_atoms)
         self.formula(material, names, counts)
         self.formula_reduced(material, names, reduced_counts)
+        self.species(material, names)
+        self.species_and_counts(material, names, reduced_counts)
         self.lattice_parameters(ideal, std_atoms, ideal.periodicity)
...
@@ -727,5 +737,7 @@ class Material1DNormalizer(MaterialNormalizer):
         self.lattice_vectors(ideal, std_atoms)
         self.formula(material, names, counts)
         self.formula_reduced(material, names, reduced_counts)
+        self.species(material, names)
+        self.species_and_counts(material, names, reduced_counts)
         self.material_id_1d(material, std_atoms)
         self.lattice_parameters(ideal, std_atoms, ideal.periodicity)
nomad/normalizing/system.py
...
@@ -248,7 +248,7 @@ class SystemNormalizer(SystemBasedNormalizer):
         system_type = config.services.unavailable_value
         if len(atoms) <= config.normalize.system_classification_with_clusters_threshold:
             try:
-                classifier = Classifier(cluster_threshold=config.normalize.cluster_threshold)
+                classifier = Classifier(radii="covalent", cluster_threshold=config.normalize.cluster_threshold)
                 cls = classifier.classify(atoms)
             except Exception as e:
                 self.logger.error(
...
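A usage sketch of the changed classifier call outside of nomad. The radii and cluster_threshold arguments mirror the diff; the concrete threshold value and the test structure are placeholders (in nomad the threshold comes from config.normalize.cluster_threshold, now 2.5).

from ase.build import bulk
from matid import Classifier

# Classify a simple periodic bulk structure using covalent radii for clustering.
atoms = bulk("Cu", "fcc", a=3.6)
classifier = Classifier(radii="covalent", cluster_threshold=2.5)
cls = classifier.classify(atoms)
print(type(cls).__name__)  # e.g. Class3D for a bulk crystal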
nomad/processing/data.py
...
@@ -444,7 +444,7 @@ class Calc(Proc):
             # Get encyclopedia method information directly from the referenced calculation.
             ref_enc_method = ref_archive.section_metadata.encyclopedia.method
-            if ref_enc_method is None or len(ref_enc_method) == 0:
+            if ref_enc_method is None or len(ref_enc_method) == 0 or ref_enc_method.functional_type is None:
                 raise ValueError("No method information available in referenced calculation.")
             backend.entry_archive.section_metadata.encyclopedia.method = ref_enc_method
...
@@ -455,6 +455,7 @@ class Calc(Proc):
             self._entry_metadata.dft.xc_functional = ref_archive.section_metadata.dft.xc_functional
             self._entry_metadata.dft.basis_set = ref_archive.section_metadata.dft.basis_set
             self._entry_metadata.dft.update_group_hash()
+            self._entry_metadata.encyclopedia.status = EncyclopediaMetadata.status.type.success
         except Exception as e:
             logger.error("Could not retrieve method information for phonon calculation.", exception=e)
             if self._entry_metadata.encyclopedia is None:
...
@@ -1077,7 +1078,7 @@ class Upload(Proc):
                 '"%s" ' % self.name if self.name else '', self.upload_time.isoformat()),  # pylint: disable=no-member
             'You can review your data on your upload page: %s' % config.gui_url(page='uploads'),
             '',
-            'If you encouter any issues with your upload, please let us know and replay to this email.',
+            'If you encounter any issues with your upload, please let us know and reply to this email.',
             '',
             'The nomad team'
         ])
...
ops/helm/nomad/templates/api-deployment.yaml