Commit 80b4a0cc authored by Markus Scheidgen

Updated GUI after mi2 datamodel refactor.

parent f6fc5d24
Pipeline #70045 failed with stages in 4 minutes and 34 seconds
eels @ 6bda4840
Subproject commit 6bda4840730dd24f1541f596a6ee1ead70ee1ee3
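
For orientation before the diff: this commit renames the grouped-search request flags and response keys from the old `group_<name>`/`<name>` pair to a single `<name>_grouped` name, with `<name>_grouped_after` as the pagination cursor. A minimal sketch of the new names, assuming a local API instance and the `requests` library (host/port are placeholders; the parameter and key names are taken from the diff below):

```python
# Sketch only: host/port are assumptions; parameter and key names come from the diff.
import requests

# Before: GET /repo/?owner=all&group_datasets=true   -> response['datasets']
# After:  GET /repo/?owner=all&datasets_grouped=true -> response['datasets_grouped']
response = requests.get(
    'http://localhost:8000/api/repo/',
    params={
        'owner': 'all',
        'datasets_grouped': 'true',            # was: group_datasets=true
        # 'datasets_grouped_after': <cursor>,  # was: datasets_after (pagination)
    })
datasets = response.json().get('datasets_grouped', {'values': {}})
```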
......@@ -21,8 +21,7 @@ class DatasetPage extends React.Component {
api: PropTypes.object.isRequired,
datasetId: PropTypes.string.isRequired,
raiseError: PropTypes.func.isRequired,
history: PropTypes.object.isRequired,
domain: PropTypes.object.isRequired
history: PropTypes.object.isRequired
}
static styles = theme => ({
......@@ -90,7 +89,7 @@ class DatasetPage extends React.Component {
}
render() {
const { classes, datasetId, domain } = this.props
const { classes, datasetId } = this.props
const { dataset, update, empty } = this.state
return (
......@@ -118,9 +117,6 @@ class DatasetPage extends React.Component {
>
<Search
resultTab="entries" tabs={['entries', 'groups', 'datasets']}
entryListProps={{
selectedColumns: [...domain.defaultSearchResultColumns, 'published', 'authors']
}}
/>
</SearchContext>
</div>
......
......@@ -60,7 +60,7 @@ class UserdataPage extends React.Component {
<SearchContext
{...this.props}
ownerTypes={['user', 'staging']} initialQuery={{owner: 'user'}}
initialRequest={{order_by: 'upload_time', uploads: true}}
initialRequest={{order_by: 'upload_time', uploads_grouped: true}}
>
<Search
resultTab="uploads"
......
......@@ -13,6 +13,10 @@ export default class DFTEntryOverview extends React.Component {
render() {
const { data } = this.props
if (!data.dft) {
return <Typography color="error">No metadata available</Typography>
}
return (
<Quantity column>
<Quantity row>
......
......@@ -53,8 +53,8 @@ export const domains = ({
// The unique_geometries search aggregates unique geometries based on 10^8 hashes.
// This takes too long in Elasticsearch for a reasonable user experience.
// Therefore, we only support geometries without a uniqueness check.
'dft.geometries': {
label: 'Geometries',
'dft.unique_geometries': {
label: 'Unique geometries',
shortLabel: 'Geometries',
tooltip: 'Aggregates the number of simulated system geometries in all entries.',
renderResultString: count => (<span> that simulate <b>{count.toLocaleString()}</b> unique geometrie{count === 1 ? '' : 's'}</span>)
......
......@@ -12,7 +12,13 @@ export default class EMSEntryOverview extends React.Component {
render() {
const { data } = this.props
const { ems: { preview_url } } = data
const { ems } = data
if (!ems) {
return <Typography color="error">No metadata available</Typography>
}
const preview_url = ems && ems.preview_url
let relative_preview_url = null
if (!preview_url) {
......@@ -39,7 +45,7 @@ export default class EMSEntryOverview extends React.Component {
<Quantity quantity="ems.experiment_location" label="experiment location" noWrap {...this.props} />
<Quantity label="experiment time" {...this.props}>
<Typography noWrap>{
data.ems.experiment_time !== 'unavailable' ? new Date(data.ems.experiment_time * 1000).toLocaleString() : 'unavailable'
data.ems.experiment_time && data.ems.experiment_time !== 'unavailable' ? new Date(data.ems.experiment_time * 1000).toLocaleString() : 'unavailable'
}</Typography>
</Quantity>
<Quantity label="data" {...this.props}>
......
......@@ -248,7 +248,7 @@ class DatasetListUnstyled extends React.Component {
render() {
const { classes, data, total, datasets_after, onChange, actions } = this.props
const datasets = data.datasets || {values: []}
const datasets = data.datasets_grouped || {values: []}
const results = Object.keys(datasets.values).map(id => {
const exampleDataset = datasets.values[id].examples[0].datasets.find(ds => ds.id === id)
return {
......@@ -272,10 +272,10 @@ class DatasetListUnstyled extends React.Component {
<Toolbar className={classes.scrollBar}>
<span className={classes.scrollSpacer}>&nbsp;</span>
<span>{paginationText}</span>
<IconButton disabled={!datasets_after} onClick={() => onChange({datasets_after: null})}>
<IconButton disabled={!datasets_after} onClick={() => onChange({datasets_grouped_after: null})}>
<StartIcon />
</IconButton>
<IconButton disabled={results.length < per_page} onClick={() => onChange({datasets_after: after})}>
<IconButton disabled={results.length < per_page} onClick={() => onChange({datasets_grouped_after: after})}>
<NextIcon />
</IconButton>
</Toolbar>
......
......@@ -167,7 +167,7 @@ class GroupListUnstyled extends React.Component {
render() {
const { classes, data, total, groups_after, onChange, actions, domain } = this.props
const groups = data['dft.groups'] || {values: []}
const groups = data['dft.groups_grouped'] || {values: []}
const results = Object.keys(groups.values).map(group_hash => {
const example = groups.values[group_hash].examples[0]
return {
......@@ -194,10 +194,10 @@ class GroupListUnstyled extends React.Component {
<Toolbar className={classes.scrollBar}>
<span className={classes.scrollSpacer}>&nbsp;</span>
<span>{paginationText}</span>
<IconButton disabled={!groups_after} onClick={() => onChange({groups_after: null})}>
<IconButton disabled={!groups_after} onClick={() => onChange({'dft.groups_grouped_after': null})}>
<StartIcon />
</IconButton>
<IconButton disabled={results.length < per_page} onClick={() => onChange({groups_after: after})}>
<IconButton disabled={results.length < per_page} onClick={() => onChange({'dft.groups_grouped_after': after})}>
<NextIcon />
</IconButton>
</Toolbar>
......
......@@ -61,7 +61,7 @@ class Search extends React.Component {
padding: `6px 0 2px 0`
},
domainButton: {
margin: theme.spacing.unit,
margin: theme.spacing.unit
},
metricButton: {
margin: theme.spacing.unit,
......@@ -130,9 +130,9 @@ class Search extends React.Component {
this.setState({resultTab: tab}, () => {
setRequest({
uploads: tab === 'uploads' ? true : undefined,
datasets: tab === 'datasets' ? true : undefined,
'dft.groups': tab === 'groups' ? true : undefined
uploads_grouped: tab === 'uploads' ? true : undefined,
datasets_grouped: tab === 'datasets' ? true : undefined,
'dft.groups_grouped': tab === 'groups' ? true : undefined
})
})
}
......@@ -189,7 +189,7 @@ class Search extends React.Component {
{tabs.map(tab => <KeepState
key={tab}
visible={resultTab === tab}
render={() => Search.tabs[tab].render({domain: domain , ...entryListProps})}
render={() => Search.tabs[tab].render({domain: domain, ...entryListProps})}
/>)}
</Paper>
</div>
......@@ -274,7 +274,6 @@ class ElementsVisualization extends React.Component {
}
class MetricSelect extends React.Component {
static contextType = SearchContext.type
constructor(props) {
......@@ -556,7 +555,7 @@ class SearchDatasetList extends React.Component {
return <DatasetList data={response}
total={response.statistics.total.all.datasets}
datasets_after={response.datasets && response.datasets.after}
datasets_after={response.datasets_grouped && response.datasets_grouped.after}
onChange={setRequest}
actions={<ReRunSearchButton/>}
{...response} {...this.props}
......@@ -572,7 +571,7 @@ class SearchGroupList extends React.Component {
return <GroupList data={response}
total={response.statistics.total.all['dft.groups']}
groups_after={response['dft.groups'] && response['dft.groups'].after}
groups_after={response['dft.groups_grouped'] && response['dft.groups_grouped'].after}
onChange={setRequest}
actions={<ReRunSearchButton/>}
{...response} {...this.props}
......@@ -588,7 +587,7 @@ class SearchUploadList extends React.Component {
return <UploadList data={response}
total={response.statistics.total.all.uploads}
uploads_after={response.uploads && response.uploads.after}
uploads_after={response.uploads_grouped && response.uploads_grouped.after}
onChange={setRequest}
actions={<ReRunSearchButton/>}
{...response} {...this.props}
......
......@@ -92,8 +92,8 @@ class SearchContext extends React.Component {
}
this.setState(
{
domain: domains[domain] || domains.dft,
query: newQuery
domain: domains[domain] || domains.dft,
query: newQuery
}, () => this.handleRequestChange({domain: domain}))
}
}
......
......@@ -182,7 +182,7 @@ class UploadListUnstyled extends React.Component {
render() {
const { classes, data, total, uploads_after, onChange, actions } = this.props
const uploads = data.uploads || {values: []}
const uploads = data.uploads_grouped || {values: []}
const results = Object.keys(uploads.values).map(id => {
return {
id: id,
......@@ -204,10 +204,10 @@ class UploadListUnstyled extends React.Component {
<Toolbar className={classes.scrollBar}>
<span className={classes.scrollSpacer}>&nbsp;</span>
<span>{paginationText}</span>
<IconButton disabled={!uploads_after} onClick={() => onChange({uploads_after: null})}>
<IconButton disabled={!uploads_after} onClick={() => onChange({uploads_grouped_after: null})}>
<StartIcon />
</IconButton>
<IconButton disabled={results.length < per_page} onClick={() => onChange({uploads_after: after})}>
<IconButton disabled={results.length < per_page} onClick={() => onChange({uploads_grouped_after: after})}>
<NextIcon />
</IconButton>
</Toolbar>
......
......@@ -88,7 +88,7 @@ _search_request_parser.add_argument(
'exclude', type=str, action='split', help='Excludes the given keys in the returned data.')
for group_name in search.groups:
_search_request_parser.add_argument(
'group_%s' % group_name, type=bool, help=('Return %s group data.' % group_name))
group_name, type=bool, help=('Return %s group data.' % group_name))
_search_request_parser.add_argument(
'%s_after' % group_name, type=str,
help='The last %s id of the last scroll window for the %s group' % (group_name, group_name))
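
To make the renamed flags concrete, a small sketch of what the parser loop above now registers, assuming `search.groups` contains the group names used elsewhere in this diff (`uploads_grouped`, `datasets_grouped`, `dft.groups_grouped`); the parser below is a stand-in for `_search_request_parser`:

```python
# Illustrative only: group names are taken from the diff, the parser object is a stand-in.
from flask_restplus import reqparse

parser = reqparse.RequestParser()
for group_name in ['uploads_grouped', 'datasets_grouped', 'dft.groups_grouped']:
    # the boolean flag no longer carries the 'group_' prefix
    parser.add_argument(group_name, type=bool, help='Return %s group data.' % group_name)
    # the pagination cursor keeps its '<group>_after' form, e.g. 'datasets_grouped_after'
    parser.add_argument(
        '%s_after' % group_name, type=str,
        help='The last %s id of the last scroll window for the %s group' % (group_name, group_name))
```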
......@@ -170,7 +170,7 @@ class RepoCalcsResource(Resource):
metrics: List[str] = request.args.getlist('metrics')
with_statistics = args.get('statistics', False) or \
any(args.get('group_%s' % group_name, False) for group_name in search.groups)
any(args.get(group_name, False) for group_name in search.groups)
except Exception as e:
abort(400, message='bad parameters: %s' % str(e))
......@@ -198,7 +198,7 @@ class RepoCalcsResource(Resource):
additional_metrics = [
group_quantity.metric_name
for group_name, group_quantity in search.groups.items()
if args.get('group_%s' % group_name, False)]
if args.get(group_name, False)]
total_metrics = metrics + additional_metrics
......@@ -218,7 +218,7 @@ class RepoCalcsResource(Resource):
else:
for group_name, group_quantity in search.groups.items():
if args.get('group_%s' % group_name, False):
if args.get(group_name, False):
kwargs: Dict[str, Any] = {}
if group_name == 'group_uploads':
kwargs.update(order_by='upload_time', order='desc')
......@@ -240,7 +240,7 @@ class RepoCalcsResource(Resource):
quantities = results.pop('quantities')
for group_name, group_quantity in search.groups.items():
if args.get('group_%s' % group_name, False):
if args.get(group_name, False):
results[group_name] = quantities[group_quantity.qualified_name]
# build python code/curl snippet
......
......@@ -133,7 +133,7 @@ class DFTMetadata(MSection):
n_geometries = Quantity(
type=int, description='Number of unique geometries.',
a_sesrch=SearchQuantity(metric_name='geometries', metric='sum'))
a_search=SearchQuantity(metric_name='geometries', metric='sum'))
n_calculations = Quantity(
type=int,
......@@ -162,7 +162,7 @@ class DFTMetadata(MSection):
group_hash = Quantity(
type=str,
description='Hashes that describe unique geometries simulated by this code run.',
a_search=SearchQuantity(many_or='append', group='groups', metric_name='groups', metric='cardinality'))
a_search=SearchQuantity(many_or='append', group='groups_grouped', metric_name='groups', metric='cardinality'))
labels = SubSection(
sub_section=Label, repeats=True,
......
......@@ -16,7 +16,7 @@
Experimental material science specific metadata
'''
from nomad import utils
from nomad import utils, config
from nomad.metainfo import Quantity, MSection, Section, Datetime
from nomad.metainfo.search import SearchQuantity
......@@ -34,7 +34,7 @@ class EMSMetadata(MSection):
# general metadata
experiment_summary = Quantity(type=str, default='not processed', a_search=SearchQuantity())
experiment_location = Quantity(type=str, default='not processed', a_search=SearchQuantity())
experiment_time = Quantity(type=Datetime, default='not processed', a_search=SearchQuantity())
experiment_time = Quantity(type=Datetime, a_search=SearchQuantity())
# method
method = Quantity(type=str, default='not processed', a_search=SearchQuantity(default_statistic=True))
......@@ -77,8 +77,10 @@ class EMSMetadata(MSection):
backend, 'experiment_summary', 'section_experiment', logger=logger)
self.experiment_location = get_optional_backend_value(
backend, 'experiment_location', 'section_experiment', logger=logger)
self.experiment_time = get_optional_backend_value(
backend, 'experiment_time', 'section_experiment', logger=logger)
experiment_time = get_optional_backend_value(
backend, 'experiment_time', 'section_experiment', None, logger=logger)
if experiment_time != config.services.unavailable_value:
self.experiment_time = experiment_time
self.method = get_optional_backend_value(
backend, 'experiment_method_name', 'section_method', logger=logger)
......
......@@ -252,7 +252,7 @@ class EntryMetadata(metainfo.MSection):
type=str,
description='A random UUID that uniquely identifies the upload of the entry.',
a_search=SearchQuantity(
many_or='append', group='uploads', metric_name='uploads', metric='cardinality'))
many_or='append', group='uploads_grouped', metric_name='uploads', metric='cardinality'))
calc_id = metainfo.Quantity(
type=str,
......@@ -409,7 +409,7 @@ class EntryMetadata(metainfo.MSection):
description='Search for a particular dataset by exact name.'),
SearchQuantity(
name='dataset_id', es_quantity='datasets.dataset_id', many_or='append',
group='datasets',
group='datasets_grouped',
metric='cardinality', metric_name='datasets',
description='Search for a particular dataset by its id.')])
......
......@@ -335,6 +335,9 @@ class _Datetime(DataType):
raise TypeError('Invalid date literal "{0}"'.format(datetime_str))
def _convert(self, value):
if value is None:
return None
if isinstance(value, str):
value = self._parse(value)
......@@ -350,6 +353,9 @@ class _Datetime(DataType):
return self._convert(value)
def serialize(self, section: 'MSection', quantity_def: 'Quantity', value: Any) -> Any:
if value is None:
return None
value.replace(tzinfo=pytz.utc)
return value.isoformat()
......
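
The two `None` guards above let a Datetime quantity stay unset or be explicitly cleared without the serializer calling `isoformat()` on `None`. A minimal sketch of the resulting behaviour, mirroring the new test further down (the section name is hypothetical; imports follow the test module):

```python
# Hypothetical example section; imports match the test module in this diff.
import datetime
from nomad.metainfo.metainfo import MSection, Quantity, Datetime

class Sample(MSection):
    when = Quantity(type=Datetime)

s = Sample()
assert s.when is None                            # unset value stays None
s.when = datetime.datetime.now()
assert isinstance(s.m_to_dict()['when'], str)    # serialized as an ISO string
s.when = None                                    # explicit clearing is now allowed
assert s.m_to_dict()['when'] is None             # serializes to None instead of raising
```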
......@@ -791,10 +791,10 @@ class TestRepo():
assert rv.status_code == 404
def test_search_datasets(self, api, example_elastic_calcs, no_warn, other_test_user_auth):
rv = api.get('/repo/?owner=all&group_datasets=true', headers=other_test_user_auth)
rv = api.get('/repo/?owner=all&datasets_grouped=true', headers=other_test_user_auth)
data = self.assert_search(rv, 4)
datasets = data.get('datasets', None)
datasets = data.get('datasets_grouped', None)
assert datasets is not None
values = datasets['values']
assert values['ds_id']['total'] == 4
......@@ -804,10 +804,10 @@ class TestRepo():
assert data['statistics']['total']['all']['datasets'] > 0
def test_search_uploads(self, api, example_elastic_calcs, no_warn, other_test_user_auth):
rv = api.get('/repo/?owner=all&group_uploads=true', headers=other_test_user_auth)
rv = api.get('/repo/?owner=all&uploads_grouped=true', headers=other_test_user_auth)
data = self.assert_search(rv, 4)
uploads = data.get('uploads', None)
uploads = data.get('uploads_grouped', None)
assert uploads is not None
values = uploads['values']
......@@ -941,9 +941,9 @@ class TestRepo():
rv = api.get('/repo/?%s' % urlencode({
'metrics': metrics,
'group_statistics': True,
'group_dft.groups': True,
'group_datasets': True,
'group_uploads': True}, doseq=True))
'dft.groups_grouped': True,
'datasets_grouped': True,
'uploads_grouped': True}, doseq=True))
assert rv.status_code == 200
data = json.loads(rv.data)
......@@ -957,7 +957,7 @@ class TestRepo():
else:
assert len(metrics_result) == 1 # code_runs is the only metric for authors
for group in ['dft.groups', 'uploads', 'datasets']:
for group in ['dft.groups_grouped', 'uploads_grouped', 'datasets_grouped']:
assert group in data
assert 'after' in data[group]
assert 'values' in data[group]
......
......@@ -15,4 +15,11 @@ Authorization: Basic bGVvbmFyZC5ob2ZzdGFkdGVyQG5vbWFkLWZhaXJkaS50ZXN0cy5kZTo=
###
GET http://localhost:9200/test_nomad_fairdi_calcs/_search HTTP/1.1
\ No newline at end of file
GET http://localhost:9200/test_nomad_fairdi_calcs/_search HTTP/1.1
###
DELETE http://localhost:9200/nomad_fairdi_calcs HTTP/1.1
###
DELETE http://localhost:9200/nomad_fairdi_test HTTP/1.1
\ No newline at end of file
......@@ -15,11 +15,12 @@
import pytest
import numpy as np
import pint.quantity
import datetime
from nomadcore.local_meta_info import InfoKindEl, InfoKindEnv
from nomad.metainfo.metainfo import MSection, MCategory, Section, Quantity, SubSection, \
Definition, Package, DeriveError, MetainfoError, Environment, MResource, units
Definition, Package, DeriveError, MetainfoError, Environment, MResource, Datetime, units
from nomad.metainfo.example import Run, VaspRun, System, SystemHash, Parsing, m_package as example_package
from nomad.metainfo.legacy import LegacyMetainfoEnvironment
from nomad.parsing.metainfo import MetainfoBackend
......@@ -461,6 +462,29 @@ class TestM1:
assert run['parsing.parser_name'] == 'test'
class TestDatatypes:
def test_datetime(self):
class TestSection(MSection):
datetime = Quantity(type=Datetime)
obj = TestSection()
assert obj.datetime is None
assert 'datetime' not in obj.m_to_dict()
obj.datetime = datetime.datetime.now()
assert obj.datetime is not None
assert isinstance(obj.m_to_dict()['datetime'], str)
obj.datetime = obj.datetime.isoformat()
assert obj.datetime is not None
assert isinstance(obj.m_to_dict()['datetime'], str)
obj.datetime = None
assert obj.datetime is None
assert obj.m_to_dict()['datetime'] is None
class TestEnvironment:
@pytest.fixture
......