Commit 214b1efb authored by Markus Scheidgen

Merge branch 'upload_dataset_fixes' into 'v1.0.0'

Upload dataset fixes (#709)

See merge request !536
parents fbd66d32 2dbf8799
Pipeline #120211 passed with stages in 27 minutes and 11 seconds
@@ -96,11 +96,10 @@ function EditComments() {
}
function EditReferences() {
- const {data, api, setIsReferencesChanged, setReferences} = useContext(editMetaDataDialogContext)
+ const {data, api, setIsReferencesChanged, setReferences, defaultReferences, setDefaultReferences} = useContext(editMetaDataDialogContext)
const [newReference, setNewReference] = useState('')
const [validation, setValidation] = useState('')
const [newReferences, setNewReferences] = useState([])
- const [defaultReferences, setDefaultReferences] = useState([])
const [edit, setEdit] = useState({index: -1, value: '', validation: ''})
const columns = [
@@ -128,13 +127,16 @@ function EditReferences() {
}, [defaultReferences, setIsReferencesChanged])
const validateAPI = useCallback((value) => {
- let query = {metadata: {references: value}, verify_only: true}
- let error = ''
- api.post(`uploads/${data.upload.upload_id}/edit`, query)
- .catch(err => {
- error = err.apiMessage[0].msg
- })
- return error
+ return new Promise(async (resolve, reject) => {
+ try {
+ let query = {metadata: {references: value}, verify_only: true}
+ let response = await api.post(`uploads/${data.upload.upload_id}/edit`, query)
+ if (response) {}
+ resolve('')
+ } catch (error) {
+ reject(error.apiMessage[0].msg)
+ }
+ })
}, [api, data])
const validate = useCallback((value, index) => {
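Note on the change above: the old validateAPI returned error synchronously, before the .catch handler of the still-pending request could ever assign to it, so it always returned ''. The new version wraps the dry-run edit request (verify_only: true) in a Promise that resolves with '' on success and rejects with the API's first validation message; the if (response) {} line looks like a no-op that only keeps the awaited response from tripping an unused-variable lint. A minimal sketch of the same contract without the new Promise(async ...) wrapper, assuming callers rely only on the resolve/reject values (an async callback already returns a promise):

const validateAPI = useCallback(async (value) => {
  // Dry-run edit: the server validates the references without applying them.
  const query = {metadata: {references: value}, verify_only: true}
  try {
    await api.post(`uploads/${data.upload.upload_id}/edit`, query)
    return ''  // fulfilled with '' when the value passes server-side validation
  } catch (error) {
    throw error.apiMessage[0].msg  // rejected with the first API error message
  }
}, [api, data])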
@@ -158,7 +160,7 @@ function EditReferences() {
setDefaultReferences(_references)
}
}
- }, [data])
+ }, [data, setDefaultReferences])
const handleTextFieldChange = (event) => {
let _newReference = event.target.value
@@ -169,12 +171,19 @@
const handleAdd = () => {
if (newReference) {
let _validation = validate(newReference, '')
- if (_validation === '') _validation = validateAPI(newReference)
if (_validation === '') {
- let _newReferences = [...newReferences, newReference]
- setNewReferences(_newReferences)
- setReferences(_newReferences)
- checkChanges(_newReferences)
+ validateAPI(newReference)
+ .then(_api_validation => {
+ if (_api_validation === '') {
+ let _newReferences = [...newReferences, newReference]
+ setNewReferences(_newReferences)
+ setReferences(_newReferences)
+ checkChanges(_newReferences)
+ }
+ })
+ .catch(_api_validation => {
+ setValidation(_api_validation)
+ })
} else {
setValidation(_validation)
}
@@ -288,7 +297,7 @@ ReferencesActions.propTypes = {
}
function EditDatasets() {
- const {data, api, raiseError, setIsDatasetChanged, setDatasets} = useContext(editMetaDataDialogContext)
+ const {data, api, raiseError, setIsDatasetChanged, setDatasets, defaultDatasets, setDefaultDatasets} = useContext(editMetaDataDialogContext)
const [suggestions, setSuggestions] = useState([])
const [validation, setValidation] = useState('')
const [allDatasets, setAllDatasets] = useState([])
@@ -296,7 +305,7 @@ function EditDatasets() {
const [addDataset, setAddDataset] = useState('')
const [newDatasets, setNewDatasets] = useState([])
const [isDuplicated, setIsDuplicated] = useState(false)
- const [defaultDatasets, setDefaultDatasets] = useState([])
+ const [apiValidation, setApiValidation] = useState('')
const columns = useMemo(() => ([
{key: '', align: 'left', render: dataset => (dataset.doi ? <span> {`${dataset.dataset_name}, DOI:`} <DOI doi={dataset.doi} /></span> : dataset.dataset_name)}
@@ -334,7 +343,20 @@ function EditDatasets() {
setDefaultDatasets(__datasets)
}
}
- }, [data, allDatasets])
+ }, [data, allDatasets, setDefaultDatasets])
+ const validateAPI = useCallback((value) => {
+ return new Promise(async (resolve, reject) => {
+ try {
+ let query = {metadata: {datasets: value}, verify_only: true}
+ let response = await api.post(`uploads/${data.upload.upload_id}/edit`, query)
+ if (response) {}
+ resolve('')
+ } catch (error) {
+ reject(error.apiMessage[0].msg)
+ }
+ })
+ }, [api, data])
const validate = useCallback((value) => {
if (allDatasets.map(dataset => dataset.dataset_name).includes(value.dataset_name)) return `There is already a dataset with name ${value.dataset_name}`
@@ -345,8 +367,13 @@
const handleAutoCompleteChange = (event, value) => {
if (value && value?.dataset_id) {
- setAddDataset(value)
- setIsDuplicated(newDatasets.map(dataset => dataset.dataset_id).includes(value.dataset_id))
+ validateAPI([value.dataset_id]).then(_validation => {
+ setApiValidation(_validation)
+ if (_validation === '') {
+ setAddDataset(value)
+ setIsDuplicated(newDatasets.map(dataset => dataset.dataset_id).includes(value.dataset_id))
+ }
+ }).catch(_validation => setApiValidation(_validation))
} else {
setAddDataset('')
}
@@ -398,7 +425,7 @@ function EditDatasets() {
addDatasetButton = <Button color="primary" variant="contained" onClick={handleCreate}>
add entry to new dataset
</Button>
- } else if (!isDuplicated && addDataset !== '') {
+ } else if (!isDuplicated && !apiValidation && addDataset !== '') {
addDatasetButton = <Button variant="contained" color="primary" onClick={handleAdd}>
add entry to existing dataset
</Button>
@@ -429,7 +456,7 @@ function EditDatasets() {
<TextField
{...params}
variant='filled' label='Search for an existing dataset' placeholder='Dataset name' margin='normal' fullWidth size='small'
- error={isDuplicated} helperText={isDuplicated && 'The data is already in the selected dataset'}
+ error={isDuplicated || apiValidation} helperText={(isDuplicated ? 'The data is already in the selected dataset' : apiValidation)}
/>
)}
/>
@@ -491,7 +518,9 @@ function EditMetaDataDialog({...props}) {
const [comment, setComment] = useState('')
const [references, setReferences] = useState([])
+ const [defaultReferences, setDefaultReferences] = useState([])
const [datasets, setDatasets] = useState([])
+ const [defaultDatasets, setDefaultDatasets] = useState([])
const [isCommentChanged, setIsCommentChanged] = useState(false)
const [isReferencesChanged, setIsReferencesChanged] = useState(false)
const [isDatasetChanged, setIsDatasetChanged] = useState(false)
@@ -533,12 +562,24 @@ function EditMetaDataDialog({...props}) {
if (isCommentChanged || isReferencesChanged || isDatasetChanged) {
let metadata = {}
if (isCommentChanged) metadata.comment = comment
- if (isReferencesChanged) metadata.references = references
+ if (isReferencesChanged) {
+ metadata.references = {}
+ let referencesToAdd = references.filter(dataset => !defaultReferences.includes(dataset))
+ let referencesToRemove = defaultReferences.filter(dataset => !references.includes(dataset))
+ if (referencesToAdd && referencesToAdd.length !== 0) metadata.references.add = referencesToAdd
+ if (referencesToRemove && referencesToRemove.length !== 0) metadata.references.remove = referencesToRemove
+ }
if (isDatasetChanged) {
+ metadata.datasets = {}
createNewDatasets().then(newDatasets => {
- metadata.datasets = datasets.filter(_dataset => _dataset.dataset_id !== _dataset.dataset_name)
+ let newDatasetsIDs = datasets.filter(_dataset => _dataset.dataset_id !== _dataset.dataset_name)
.map(_dataset => _dataset.dataset_id)
.concat(newDatasets.map(_dataset => _dataset.dataset_id))
+ let defaultDatasetsIDs = defaultDatasets.map(_dataset => _dataset.dataset_id)
+ let datasetsToAdd = newDatasetsIDs.filter(dataset => !defaultDatasetsIDs.includes(dataset))
+ let datasetsToRemove = defaultDatasetsIDs.filter(dataset => !newDatasetsIDs.includes(dataset))
+ if (datasetsToAdd && datasetsToAdd.length !== 0) metadata.datasets.add = datasetsToAdd
+ if (datasetsToRemove && datasetsToRemove.length !== 0) metadata.datasets.remove = datasetsToRemove
submitChanges(metadata)
})
} else {
@@ -567,8 +608,13 @@ function EditMetaDataDialog({...props}) {
data: data,
setComment: setComment,
setReferences: setReferences,
- setDatasets: setDatasets
- }), [api, raiseError, setIsCommentChanged, setIsReferencesChanged, setIsDatasetChanged, upload, data, setComment, setReferences, setDatasets])
+ defaultReferences: defaultReferences,
+ setDefaultReferences: setDefaultReferences,
+ setDatasets: setDatasets,
+ defaultDatasets: defaultDatasets,
+ setDefaultDatasets: setDefaultDatasets
+ }), [api, raiseError, setIsCommentChanged, setIsReferencesChanged, setIsDatasetChanged, upload,
+ data, setComment, setReferences, defaultReferences, setDefaultReferences, setDatasets, defaultDatasets, setDefaultDatasets])
return <editMetaDataDialogContext.Provider value={contextValue}>
<React.Fragment>
......
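With defaultReferences and defaultDatasets lifted into the dialog context, the submit handler above no longer overwrites the whole lists: it diffs the edited list against the defaults loaded from the server and sends only {add: ..., remove: ...}. A worked example with hypothetical values:

// defaults as loaded from the server
const defaultReferences = ['http://a.com', 'http://b.com']
// list after the user's edits
const references = ['http://b.com', 'http://c.com']
// referencesToAdd    -> ['http://c.com']   (in references, not in defaults)
// referencesToRemove -> ['http://a.com']   (in defaults, no longer in references)
// payload sent: metadata.references = {add: ['http://c.com'], remove: ['http://a.com']}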
@@ -28,6 +28,7 @@ import EntryDownloadButton from '../entry/EntryDownloadButton'
import Quantity from '../Quantity'
import {uploadPageContext} from './UploadPage'
import EditMetaDataDialog from './EditMetaDataDialog'
+ import {pluralize} from '../../utils'
const columns = [
{
@@ -86,7 +87,7 @@ const defaultSelectedColumns = [
export default function ProcessingTable(props) {
const [selected, setSelected] = useState([])
- const {pagination} = props
+ const {pagination, customTitle} = props
const {upload, isWriter} = useContext(uploadPageContext)
const selectedQuery = useMemo(() => {
@@ -102,7 +103,7 @@ export default function ProcessingTable(props) {
columns={columns} shownColumns={defaultSelectedColumns} {...props}
selected={selected} onSelectedChanged={setSelected}
>
- <DatatableToolbar title={`${pagination.total} search results`}>
+ <DatatableToolbar title={pluralize((customTitle || 'search result'), pagination.total, true)}>
<DatatableToolbarActions selection>
<EntryDownloadButton tooltip="Download files" query={selectedQuery} />
{isWriter && <EditMetaDataDialog isIcon selectedEntries={selectedQuery}/>}
@@ -117,5 +118,6 @@
ProcessingTable.propTypes = {
data: PropTypes.arrayOf(PropTypes.object).isRequired,
pagination: PropTypes.object.isRequired,
- onPaginationChanged: PropTypes.func.isRequired
+ onPaginationChanged: PropTypes.func.isRequired,
+ customTitle: PropTypes.string
}
@@ -536,6 +536,7 @@ function UploadPage() {
<ProcessingTable
data={data.data.map(entry => ({...entry.entry_metadata, ...entry}))}
pagination={combinePagination(pagination, data.pagination)}
+ customTitle='entry'
onPaginationChanged={setPagination}/>
</StepContent>
</Step>
......
@@ -670,6 +670,7 @@ export function pluralize(word, count, inclusive, format = true, prefix) {
// these words, the pluralize-library should be used instead.
const dictionary = {
'result': 'results',
+ 'search result': 'search results',
'entry': 'entries',
'material': 'materials',
'dataset': 'datasets'
......
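The new 'search result' dictionary entry lets the ProcessingTable toolbar above build both the default title and a customTitle such as 'entry' through the same helper. A sketch of the expected output, assuming the inclusive flag prefixes the formatted count as the replaced `${pagination.total} search results` template did:

pluralize('search result', 1, true)  // "1 search result"
pluralize('search result', 5, true)  // "5 search results"
pluralize('entry', 5, true)          // "5 entries" (the upload page passes customTitle='entry')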
@@ -61,6 +61,7 @@ from nomad.archive import (
from nomad.app.v1.models import (
MetadataEditRequest, And, Aggregation, TermsAggregation, MetadataPagination, MetadataRequired)
from nomad.search import update_metadata as es_update_metadata
+ import validators
section_metadata = datamodel.EntryArchive.metadata.name
section_workflow = datamodel.EntryArchive.workflow.name
@@ -465,6 +466,8 @@ class MetadataEditRequestHandler:
assert value is None or type(value) == definition.type, f'Expected a {definition.type.__name__}'
if definition.name == 'embargo_length':
assert 0 <= value <= 36, 'Value should be between 0 and 36'
+ if definition.name == 'references':
+ assert validators.url(value), 'Please enter a valid URL ...'
return None if value == '' else value
elif definition.type == metainfo.Datetime:
if value is not None:
......
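The server-side check delegates URL syntax to the validators package (pinned in requirements.txt below). validators.url returns True for a valid URL and a falsy ValidationFailure object otherwise, so it works directly inside the assert; a host without a top-level domain fails, which is why the test fixtures below switch from 'http://test1' to 'http://test1.com':

import validators

assert validators.url('http://test1.com')  # well-formed URL -> True
assert not validators.url('http://test1')  # bare host, no TLD -> falsy ValidationFailure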
@@ -86,6 +86,7 @@ fastapi==0.63.0
uvicorn[standard]==0.13.4
a2wsgi==1.4.0
python-multipart==0.0.5
+ validators==0.18.2
# [dev]
setuptools==57.5.0
......
@@ -697,22 +697,22 @@ def test_read_metadata_from_file(proc_infra, test_user, other_test_user, tmp):
zf.write('tests/data/proc/templates/template.json', 'examples/template.json')
entry_1 = dict(
comment='Entry 1 of 3',
- references='http://test1',
+ references='http://test1.com',
external_id='external_id_1')
with zf.open('examples/entry_1/nomad.yaml', 'w') as f: f.write(yaml.dump(entry_1).encode())
entry_2 = dict(
comment='Entry 2 of 3',
- references=['http://test2'],
+ references=['http://test2.com'],
external_id='external_id_2')
with zf.open('examples/entry_2/nomad.json', 'w') as f: f.write(json.dumps(entry_2).encode())
metadata = {
'upload_name': 'my name',
'coauthors': other_test_user.user_id,
- 'references': ['http://test0'],
+ 'references': ['http://test0.com'],
'entries': {
'examples/entry_3/template.json': {
'comment': 'Entry 3 of 3',
- 'references': 'http://test3',
+ 'references': 'http://test3.com',
'external_id': 'external_id_3'
},
'examples/entry_1/template.json': {
@@ -729,7 +729,7 @@ def test_read_metadata_from_file(proc_infra, test_user, other_test_user, tmp):
comment = ['root entries comment 1', 'Entry 2 of 3', 'Entry 3 of 3', None]
external_ids = ['external_id_1', 'external_id_2', 'external_id_3', None]
- references = [['http://test1'], ['http://test2'], ['http://test3'], ['http://test0']]
+ references = [['http://test1.com'], ['http://test2.com'], ['http://test3.com'], ['http://test0.com']]
expected_coauthors = [other_test_user]
for i in range(len(entries)):
......
@@ -33,7 +33,7 @@ all_coauthor_metadata = dict(
coauthors=['lhofstadter'],
external_id='31415926536',
comment='a humble comment',
- references=['a reference', 'another reference'],
+ references=['http://reference1.com', 'http://reference2.com'],
external_db='AFLOW',
reviewers=['lhofstadter'],
datasets=['test_dataset_1'])
@@ -359,25 +359,32 @@ def test_set_and_clear_all(proc_infra, example_data_writeable, example_datasets,
id='reviewers-remove+set'),
pytest.param(
dict(
- metadata_1=dict(references='ref1'),
- expected_metadata_1=dict(references=['ref1']),
- metadata_2=dict(references={'add': ['ref2']}),
- expected_metadata_2=dict(references=['ref1', 'ref2'])),
+ metadata_1=dict(references='http://ref1.com'),
+ expected_metadata_1=dict(references=['http://ref1.com']),
+ metadata_2=dict(references={'add': ['http://ref2.com']}),
+ expected_metadata_2=dict(references=['http://ref1.com', 'http://ref2.com'])),
id='references-add'),
pytest.param(
dict(
- metadata_1=dict(references=['ref1', 'ref2']),
- expected_metadata_1=dict(references=['ref1', 'ref2']),
- metadata_2=dict(references={'add': 'ref3', 'remove': ['ref1']}),
- expected_metadata_2=dict(references=['ref2', 'ref3'])),
+ metadata_1=dict(references=['http://ref1.com', 'http://ref2.com']),
+ expected_metadata_1=dict(references=['http://ref1.com', 'http://ref2.com']),
+ metadata_2=dict(references={'add': 'http://ref3.com', 'remove': ['http://ref1.com']}),
+ expected_metadata_2=dict(references=['http://ref2.com', 'http://ref3.com'])),
id='references-add+remove'),
pytest.param(
dict(
- metadata_1=dict(references='ref1'),
- expected_metadata_1=dict(references=['ref1']),
- metadata_2=dict(references={'remove': 'ref4', 'add': ['ref2', 'ref3', 'ref4']}),
+ metadata_1=dict(references='http://ref1.com'),
+ expected_metadata_1=dict(references=['http://ref1.com']),
+ metadata_2=dict(references={'remove': 'http://ref4.com', 'add': ['http://ref2.com', 'http://ref3.com', 'http://ref4.com']}),
expected_error_loc_2=('metadata', 'references')),
- id='references-add+remove-incoherent')])
+ id='references-add+remove-incoherent'),
+ pytest.param(
+ dict(
+ metadata_1=dict(references='http://ref1.com'),
+ expected_metadata_1=dict(references=['http://ref1.com']),
+ metadata_2=dict(references={'add': ['http://ref2', 'http://ref3.com']}),
+ expected_error_loc_2=('metadata', 'references')),
+ id='references-not-valid-URL')])
def test_list_quantities(proc_infra, purged_app, example_data_writeable, example_datasets, test_users_dict, kwargs):
def replace_dataset_ref(dataset_ref):
if dataset_ref.startswith('ref:'):
......