Commit ad652765 authored by Markus Scheidgen's avatar Markus Scheidgen
Browse files

Added capabilities to remove datasets.

parent 63ac721e
Pipeline #63403 passed with stages
in 13 minutes and 19 seconds
......@@ -8,6 +8,7 @@ import Search from './search/Search'
import SearchContext from './search/SearchContext'
import { Typography, Link } from '@material-ui/core'
import { DatasetActions } from './search/DatasetList'
import { withRouter } from 'react-router'
export const help = `
This page allows you to **inspect** and **download** NOMAD datasets. It alsow allows you
......@@ -19,7 +20,8 @@ class DatasetPage extends React.Component {
classes: PropTypes.object.isRequired,
api: PropTypes.object.isRequired,
datasetId: PropTypes.string.isRequired,
raiseError: PropTypes.func.isRequired
raiseError: PropTypes.func.isRequired,
history: PropTypes.object.isRequired
}
static styles = theme => ({
......@@ -35,6 +37,11 @@ class DatasetPage extends React.Component {
actions: {}
})
constructor(props) {
  super(props)
  // Bind once here so the same function reference can be passed as a prop
  // (e.g. to DatasetActions' onChange) without re-binding on every render.
  this.handleChange = this.handleChange.bind(this)
}
state = {
dataset: {},
update: 0
......@@ -72,6 +79,14 @@ class DatasetPage extends React.Component {
}
}
handleChange(dataset) {
if (dataset) {
this.setState({dataset: dataset, update: this.state.update + 1})
} else {
this.props.history.goBack()
}
}
render() {
const { classes, datasetId } = this.props
const { dataset, update } = this.state
......@@ -89,7 +104,7 @@ class DatasetPage extends React.Component {
<div className={classes.actions}>
{dataset && dataset.example && <DatasetActions
dataset={dataset}
onChange={dataset => this.setState({dataset: dataset, update: this.state.update + 1})}/>
onChange={this.handleChange}/>
}
</div>
</div>
......@@ -102,4 +117,4 @@ class DatasetPage extends React.Component {
}
}
export default compose(withApi(false), withErrors, withStyles(DatasetPage.styles))(DatasetPage)
export default compose(withRouter, withApi(false), withErrors, withStyles(DatasetPage.styles))(DatasetPage)
......@@ -352,6 +352,15 @@ class Api {
.finally(this.onFinishLoading)
}
async deleteDataset(datasetName) {
this.onStartLoading()
return this.swagger()
.then(client => client.apis.datasets.delete_dataset({name: datasetName}))
.catch(handleApiError)
.then(response => response.body)
.finally(this.onFinishLoading)
}
async getUsers(query) {
// no loading indicator, because this is only used in the background of the edit dialog
return this.swagger()
......
......@@ -35,6 +35,7 @@ class DatasetActionsUnstyled extends React.Component {
super(props)
this.handleClickDOI = this.handleClickDOI.bind(this)
this.handleClickDataset = this.handleClickDataset.bind(this)
this.handleClickDelete = this.handleClickDelete.bind(this)
}
handleClickDataset() {
......@@ -55,6 +56,19 @@ class DatasetActionsUnstyled extends React.Component {
.catch(raiseError)
}
handleClickDelete() {
const {api, dataset, onChange, raiseError} = this.props
const datasetName = dataset.name
api.deleteDataset(datasetName)
.then(dataset => {
if (onChange) {
onChange(null)
}
})
.catch(raiseError)
}
render() {
const {dataset, search, user, classes} = this.props
......@@ -72,7 +86,7 @@ class DatasetActionsUnstyled extends React.Component {
</IconButton>
</Tooltip>}
{editable && canDelete && <Tooltip title="Delete this dataset.">
<IconButton onClick={this.handleClickDOI}>
<IconButton onClick={this.handleClickDelete}>
<DeleteIcon />
</IconButton>
</Tooltip>}
......@@ -94,7 +108,8 @@ class DatasetListUnstyled extends React.Component {
total: PropTypes.number.isRequired,
onChange: PropTypes.func.isRequired,
history: PropTypes.any.isRequired,
datasets_after: PropTypes.string
datasets_after: PropTypes.string,
actions: PropTypes.element
}
static styles = theme => ({
......@@ -155,7 +170,7 @@ class DatasetListUnstyled extends React.Component {
}
render() {
const { classes, data, total, datasets_after, onChange } = this.props
const { classes, data, total, datasets_after, onChange, actions } = this.props
const datasets = data.datasets || {values: []}
const results = Object.keys(datasets.values).map(id => {
const exampleDataset = datasets.values[id].examples[0].datasets.find(ds => ds.id === id)
......@@ -199,6 +214,7 @@ class DatasetListUnstyled extends React.Component {
entryActions={this.renderEntryActions}
data={results}
rows={per_page}
actions={actions}
pagination={pagination}
/>
}
......
import React from 'react'
import PropTypes from 'prop-types'
import { withStyles } from '@material-ui/core/styles'
import { Card, Button, List, ListItem, ListItemText, Tooltip, Tabs, Tab, Paper, FormControl, FormGroup, Checkbox, FormControlLabel, Popover, CardContent } from '@material-ui/core'
import { Card, Button, List, ListItem, ListItemText, Tooltip, Tabs, Tab, Paper, FormControl, FormGroup, Checkbox, FormControlLabel, Popover, CardContent, IconButton } from '@material-ui/core'
import SearchBar from './SearchBar'
import EntryList from './EntryList'
import DatasetList from './DatasetList'
......@@ -10,6 +10,7 @@ import { DisableOnLoading } from '../api'
import { withDomain } from '../domains'
import KeepState from '../KeepState'
import PeriodicTable from './PeriodicTable'
import ReloadIcon from '@material-ui/icons/Cached'
class Search extends React.Component {
static propTypes = {
......@@ -373,6 +374,20 @@ class OwnerSelect extends React.Component {
}
}
// Toolbar button that re-executes the current search.
// Calling setRequest({}) changes nothing in the request itself, but pushes an
// update through the surrounding SearchContext, which re-runs the search.
class ReRunSearchButton extends React.PureComponent {
  static contextType = SearchContext.type

  render() {
    const {setRequest} = this.context
    return <Tooltip title="Re-execute the search.">
      <IconButton onClick={() => setRequest({})}>
        <ReloadIcon />
      </IconButton>
    </Tooltip>
  }
}
class SearchEntryList extends React.Component {
static contextType = SearchContext.type
......@@ -384,6 +399,7 @@ class SearchEntryList extends React.Component {
editable={query.owner === 'staging' || query.owner === 'user'}
data={response}
onChange={setRequest}
actions={<ReRunSearchButton/>}
{...request}
/>
}
......@@ -398,6 +414,7 @@ class SearchDatasetList extends React.Component {
return <DatasetList data={response}
total={response.statistics.total.all.datasets}
onChange={setRequest}
actions={<ReRunSearchButton/>}
{...response}
/>
}
......
......@@ -15,9 +15,8 @@
from flask import request, g
from flask_restplus import Resource, fields, abort
import re
import elasticsearch
from nomad import utils, search, processing as proc, datamodel, infrastructure
from nomad import utils
from nomad.app.utils import with_logger
from nomad.datamodel import Dataset
from nomad.metainfo.flask_restplus import generate_flask_restplus_model
......@@ -25,6 +24,7 @@ from nomad.metainfo.flask_restplus import generate_flask_restplus_model
from .api import api
from .auth import authenticate
from .common import pagination_model, pagination_request_parser
from .repo import edit
ns = api.namespace(
......@@ -142,24 +142,7 @@ class DatasetResource(Resource):
logger.warning('real DOI assign is not implemented yet', user_id=g.user.user_id)
# update all affected calcs in the search index
search_request = search.SearchRequest().search_parameter('dataset_id', result.dataset_id)
calc_ids = list(hit['calc_id'] for hit in search_request.execute_scan())
def elastic_updates():
for calc in proc.Calc.objects(calc_id__in=calc_ids):
entry = search.Entry.from_calc_with_metadata(
datamodel.CalcWithMetadata(**calc['metadata']))
entry = entry.to_dict(include_meta=True)
entry['_op_type'] = 'index'
yield entry
_, failed = elasticsearch.helpers.bulk(
infrastructure.elastic_client, elastic_updates(), stats_only=True)
search.refresh()
if failed > 0:
logger.error(
'update index after assign DOI with failed elastic updates',
dataset_id=result.dataset_id, nfailed=len(failed))
edit(dict(dataset_id=result.dataset_id), logger)
return result
......@@ -168,15 +151,24 @@ class DatasetResource(Resource):
@api.response(400, 'The dataset has a DOI and cannot be deleted')
@api.marshal_with(dataset_model, skip_none=True, code=200, description='Dateset deleted')
@authenticate(required=True)
@with_logger
def delete(self, name: str, logger):
    """ Delete the dataset with the given name owned by the current user. """
    # NOTE: the previous docstring wrongly said "Assign a DOI to the dataset"
    # (copy-paste from the POST handler); this endpoint deletes the dataset.
    try:
        result = Dataset.m_def.m_x('me').get(user_id=g.user.user_id, name=name)
    except KeyError:
        abort(404, 'Dataset with name %s does not exist for current user' % name)

    # datasets with a DOI are published and must not be deleted
    if result.doi is not None:
        abort(400, 'Dataset with name %s has a DOI and cannot be deleted' % name)

    # edit all affected entries: pull the dataset id from each entry's
    # metadata in mongo and re-index them in elastic
    edit(
        dict(dataset_id=result.dataset_id),
        logger,
        {'__raw__': {'$pull': {'metadata.datasets': result.dataset_id}}})

    # delete the dataset itself
    result.m_x('me').delete()
    return result
......@@ -335,6 +335,37 @@ repo_edit_model = api.model('RepoEdit', {
})
def edit(parsed_query: Dict[str, Any], logger, mongo_update: Dict[str, Any] = None, re_index=True):
    """
    Apply an edit to all entries matching the given parsed query.

    Arguments:
        parsed_query: already parsed search query selecting the affected entries
        logger: structlog-style logger used to report partial failures
        mongo_update: optional mongoengine update kwargs applied to the matching
            ``proc.Calc`` documents; if None, no mongo update is performed
        re_index: if True (default), re-index the affected entries in elastic
    """
    # get all calculations that have to change
    search_request = search.SearchRequest()
    add_query(search_request, parsed_query)
    calc_ids = list(hit['calc_id'] for hit in search_request.execute_scan())

    # perform the update on the mongo db
    if mongo_update is not None:
        n_updated = proc.Calc.objects(calc_id__in=calc_ids).update(multi=True, **mongo_update)
        if n_updated != len(calc_ids):
            logger.error('edit repo did not update all entries', payload=mongo_update)

    # re-index the affected entries in elastic search
    if re_index:
        def elastic_updates():
            # stream the updated mongo documents as elastic index actions
            for calc in proc.Calc.objects(calc_id__in=calc_ids):
                entry = search.Entry.from_calc_with_metadata(
                    datamodel.CalcWithMetadata(**calc['metadata']))
                entry = entry.to_dict(include_meta=True)
                entry['_op_type'] = 'index'
                yield entry

        _, failed = elasticsearch.helpers.bulk(
            infrastructure.elastic_client, elastic_updates(), stats_only=True)
        search.refresh()

        if failed > 0:
            # BUGFIX: with stats_only=True, bulk() returns `failed` as an int
            # count, so the original `nfailed=len(failed)` raised TypeError on
            # the very path meant to report the failure; log the count directly.
            logger.error(
                'edit repo with failed elastic updates',
                payload=mongo_update, nfailed=failed)
@ns.route('/edit')
class EditRepoCalcsResource(Resource):
@api.doc('edit_repo')
......@@ -444,31 +475,8 @@ class EditRepoCalcsResource(Resource):
parsed_query[quantity_name] = value
parsed_query['owner'] = owner
search_request = search.SearchRequest()
add_query(search_request, parsed_query)
calc_ids = list(hit['calc_id'] for hit in search_request.execute_scan())
# perform the update on the mongo db
n_updated = proc.Calc.objects(calc_id__in=calc_ids).update(multi=True, **mongo_update)
if n_updated != len(calc_ids):
logger.error('edit repo did not update all entries', payload=json_data)
# re-index the affected entries in elastic search
def elastic_updates():
for calc in proc.Calc.objects(calc_id__in=calc_ids):
entry = search.Entry.from_calc_with_metadata(
datamodel.CalcWithMetadata(**calc['metadata']))
entry = entry.to_dict(include_meta=True)
entry['_op_type'] = 'index'
yield entry
_, failed = elasticsearch.helpers.bulk(
infrastructure.elastic_client, elastic_updates(), stats_only=True)
search.refresh()
if failed > 0:
logger.error(
'edit repo with failed elastic updates',
payload=json_data, nfailed=len(failed))
# perform the change
edit(parsed_query, logger, mongo_update, True)
return json_data, 200
......
......@@ -96,6 +96,11 @@ class MEInstance():
self.me_obj = self.me_section.me_cls()
return self.save()
def delete(self):
    # Delete the underlying mongoengine document and drop our reference to it.
    self.me_obj.delete()
    self.me_obj = None
    # NOTE(review): `metainfo` is accessed after me_obj is cleared — this
    # assumes it is a stored attribute (or a property that does not read
    # me_obj); confirm against the MEInstance definition.
    return self.metainfo
def generate_mongoengine(section_def: Section):
def generate_field(quantity: Quantity):
......
......@@ -1430,6 +1430,23 @@ class TestDataset:
if name is not None:
assert dataset.get('name') == name
def assert_dataset_entry(self, api, dataset_id: str, exists: bool, with_doi: bool, **kwargs):
    """
    Assert the search-index state of entries belonging to the given dataset.

    Arguments:
        api: flask test client
        dataset_id: the dataset id to query entries for
        exists: if True, at least one entry must reference the dataset;
            if False, no entry may reference it
        with_doi: if entries exist, whether their dataset record must carry a DOI
        **kwargs: forwarded to api.get (e.g. auth headers)
    """
    rv = api.get('/repo/?dataset_id=%s' % dataset_id, **kwargs)
    assert rv.status_code == 200
    data = json.loads(rv.data)
    total = data['pagination']['total']
    # merged the two previously separate `if exists:` blocks into one branch
    if exists:
        assert total > 0
        doi = data['results'][0]['datasets'][0]['doi']
        if with_doi:
            assert doi is not None
        else:
            assert doi is None
    else:
        assert total == 0
def test_create_dataset(self, api, test_user_auth):
rv = api.put(
'/datasets/', headers=test_user_auth,
......@@ -1484,41 +1501,33 @@ class TestDataset:
rv = api.get('/datasets/ds1', headers=other_test_user_auth)
assert rv.status_code == 404
def test_post_dataset(self, api, test_user_auth, example_datasets):
rv = api.post('/datasets/ds1', headers=test_user_auth)
# TODO the actual DOI part needs to be implemented
assert rv.status_code == 200
@pytest.fixture()
def example_dataset_with_entry(self, mongo, elastic, example_datasets):
    # Create a single published calc entry (calc_id='1') that is a member of
    # dataset '1', saved to both mongo (proc.Calc) and the elastic index, so
    # tests can observe index updates when the dataset changes.
    calc = CalcWithMetadata(
        calc_id='1', upload_id='1', published=True, with_embargo=False, datasets=['1'])
    Calc(
        calc_id='1', upload_id='1', create_time=datetime.datetime.now(),
        metadata=calc.to_dict()).save()
    search.Entry.from_calc_with_metadata(calc).save()
    # make the new entry visible to subsequent searches
    search.refresh()
def test_delete_dataset(self, api, test_user_auth, example_dataset_with_entry):
    """ Deleting a dataset removes it and clears it from its entries' index data. """
    # delete dataset
    rv = api.delete('/datasets/ds1', headers=test_user_auth)
    assert rv.status_code == 200
    data = json.loads(rv.data)
    self.assert_dataset(data, name='ds1')
    # BUGFIX: the original line was a bare comparison whose result was
    # discarded — it never asserted anything; add the missing assert.
    assert api.get('/datasets/ds1', headers=test_user_auth).status_code == 404
    # the entry must no longer reference the deleted dataset in the index
    self.assert_dataset_entry(api, '1', False, False, headers=test_user_auth)
def test_get_dataset_with_doi(self, api, test_user_auth, example_datasets):
    # Deleting a dataset that already has a DOI (ds2) must be rejected with 400.
    # NOTE(review): the name says "get" but this exercises DELETE — consider
    # renaming to test_delete_dataset_with_doi for clarity.
    rv = api.delete('/datasets/ds2', headers=test_user_auth)
    assert rv.status_code == 400
def test_assign_doi(self, api, test_user_auth, example_dataset_with_entry):
    """ Assigning a DOI succeeds and the entries' index data reflects the DOI. """
    # NOTE: the inline calc/entry setup and the manual /repo/ assertions were
    # superseded by the example_dataset_with_entry fixture and the
    # assert_dataset_entry helper; the leftover pre-refactor lines are removed.
    # assign doi
    rv = api.post('/datasets/ds1', headers=test_user_auth)
    assert rv.status_code == 200
    data = json.loads(rv.data)
    self.assert_dataset(data, name='ds1', doi=True)
    # assert that the index was updated: entry still exists and carries the DOI
    self.assert_dataset_entry(api, '1', True, True, headers=test_user_auth)
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.