#
# Copyright The NOMAD Authors.
#
# This file is part of NOMAD. See https://nomad-lab.eu for further info.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

'''
A command that runs some example operations on a working nomad@FAIRDI installation
as a final integration test.
'''

import time
import os
import json

from nomad.client import api


def integrationtests(auth: api.Auth, skip_parsers: bool, skip_publish: bool, skip_doi: bool):
    '''
    Runs a sequence of example operations (upload, processing, search, edit,
    publish, DOI assignment, cleanup) against a working nomad installation as a
    final integration test. Raises AssertionError on the first failed check.

    Arguments:
        auth: The API authentication used for all requests.
        skip_parsers: If True, skip the multi-code curl upload test.
        skip_publish: If True, do not publish the test upload.
        skip_doi: If True, do not assign a DOI to the test dataset.
    '''
    multi_code_example_file = 'tests/data/integration/multi_code_data.zip'
    simple_example_file = 'tests/data/integration/examples_vasp.zip'

    has_doi = False
    published = False

    print('get the upload command')
    response = api.get('uploads/command-examples', auth=auth)
    assert response.status_code == 200, response.text
    command = response.json()['upload_command_with_name']

    def get_upload(upload):
        '''Polls the API until the given upload has no process running.
        Returns the final upload data, or None if the upload was deleted (404).'''
        first = True
        while first or upload['process_running']:
            first = False
            response = api.get(f'uploads/{upload["upload_id"]}', auth=auth)
            if response.status_code == 404:
                return None
            assert response.status_code == 200, response.text
            upload = response.json()['data']
            time.sleep(0.3)

        return upload

    # The test upload must not exist yet, otherwise later count checks are ambiguous.
    response = api.get('uploads', params=dict(upload_name='integration_test_upload'), auth=auth)
    assert response.status_code == 200, response.text
    uploads = response.json()['data']
    assert len(uploads) == 0, 'the test upload must not exist before'

    if not skip_parsers:
        print('upload multi code test data with curl')
        command = command.replace('<local_file>', multi_code_example_file)
        command = command.replace('<name>', 'integration_test_upload')
        command += ' -k'  # tolerate self-signed certificates on test deployments
        code = os.system(command)
        assert code == 0, 'curl command must be successful'
        response = api.get('uploads', params=dict(upload_name='integration_test_upload'), auth=auth)
        assert response.status_code == 200, response.text
        response_json = response.json()
        assert len(response_json['data']) == 1, 'exactly one test upload must be on the server'
        upload = response_json['data'][0]

        print('observe the upload process to be finished')
        upload = get_upload(upload)
        assert upload['process_status'] == 'SUCCESS'

        print('delete the upload again')
        upload = api.delete(f'uploads/{upload["upload_id"]}', auth=auth).json()['data']
        upload = get_upload(upload)

    print('upload simple data with API')
    with open(simple_example_file, 'rb') as f:
        response = api.post(
            'uploads', files=dict(file=f), params=dict(upload_name='integration_test_upload'),
            auth=auth, headers={'Accept': 'application/json'})
        assert response.status_code == 200, response.text
        upload = response.json()['data']

    print('observe the upload process to be finished')
    upload = get_upload(upload)
    response = api.get(f'uploads/{upload["upload_id"]}/entries', auth=auth)
    assert response.status_code == 200, response.text
    entries = response.json()['data']
    assert upload['entries'] == len(entries)

    try:
        print('get repo data')
        for entry in entries:
            response = api.get(f'entries/{entry["entry_id"]}', auth=auth)
            assert response.status_code == 200, response.text
            entry_metadata = response.json()['data']
            # Bug fix: this was a bare comparison whose result was silently
            # discarded; it must be asserted to actually check anything.
            assert entry_metadata['entry_id'] == entry['entry_id']

        print('get archive data')
        for entry in entries:
            # Bug fix: the response was not captured before, so the assert
            # below re-checked the stale response of the previous request.
            response = api.get(f'entries/{entry["entry_id"]}/archive/download', auth=auth)
            assert response.status_code == 200, response.text

        print('get archive logs')
        for entry in entries:
            response = api.post(
                f'entries/{entry["entry_id"]}/archive/query',
                data=json.dumps({
                    'required': {
                        'processing_logs': '*'
                    }
                }), auth=auth)
            assert response.status_code == 200, response.text
            assert list(response.json()['data']['archive'].keys()) == ['processing_logs']

        # All searches below are restricted to the entries of the test upload.
        query_request_params = dict(
            owner='staging',
            query={
                'upload_id': upload['upload_id']
            })

        print('perform repo search on data')
        response = api.post('entries/query', data=json.dumps(query_request_params), auth=auth)
        assert response.status_code == 200, response.text
        response_json = response.json()
        assert response_json['pagination']['total'] == 2
        assert response_json['pagination']['total'] == len(response_json['data'])

        print('performing archive paginated search')
        response = api.post('entries/archive/query', data=json.dumps(dict(
            pagination=dict(page_size=1, page_offset=1),
            **query_request_params)), auth=auth)
        assert response.status_code == 200, response.text
        response_json = response.json()
        assert response_json['pagination']['total'] == 2
        assert len(response_json['data']) == 1

        print('performing archive scrolled search')
        response = api.post('entries/archive/query', data=json.dumps(dict(
            pagination=dict(page_size=1),
            **query_request_params)), auth=auth)
        # Robustness fix: check the first page's status before consuming its
        # pagination data for the follow-up request.
        assert response.status_code == 200, response.text
        response_json = response.json()
        response = api.post('entries/archive/query', data=json.dumps(dict(
            pagination=dict(page_size=1, page_after_value=response_json['pagination']['next_page_after_value']),
            **query_request_params)), auth=auth)
        assert response.status_code == 200, response.text
        response_json = response.json()
        assert response_json['pagination']['total'] == 2
        assert len(response_json['data']) == 1

        print('performing download')
        response = api.get(
            'entries/raw/download',
            params=dict(upload_id=upload['upload_id'], owner='visible'), auth=auth)
        assert response.status_code == 200, response.text

        if not skip_publish:
            print('publish upload')
            # Bug fix: the publish request was sent without authentication and
            # its response was never checked.
            response = api.post(f'uploads/{upload["upload_id"]}/action/publish', auth=auth)
            assert response.status_code == 200, response.text

            upload = get_upload(upload)
            assert upload['process_status'] == 'SUCCESS', 'publish must be successful'
            published = True

        print('editing upload')
        response = api.get('users', params=dict(prefix='Markus Scheidgen'))
        assert response.status_code == 200, response.text
        user = response.json()['data'][0]
        dataset = 'test_dataset'
        actions = {
            'comment': {'value': 'Test comment'},
            # Bug fix: the URL scheme was misspelled as 'http;//'.
            'references': [{'value': 'http://test_reference.com'}],
            'entry_coauthors': [{'value': user['user_id']}],
            'datasets': [{'value': dataset}]}

        response = api.post(
            'entries/edit_old',
            data=json.dumps(dict(actions=actions, **query_request_params)),
            auth=auth)
        assert response.status_code == 200, response.text

        print('list datasets')
        response = api.get('datasets', auth=auth, params=dict(dataset_name=dataset))
        assert response.status_code == 200, response.text
        response_json = response.json()
        assert len(response_json['data']) == 1, response.text
        dataset_id = response_json['data'][0]['dataset_id']

        if not skip_doi and published:
            print('assigning a DOI')
            response = api.post(f'datasets/{dataset_id}/action/doi', auth=auth)
            assert response.status_code == 200, response.text
            has_doi = True

        # Datasets with a DOI can only be removed by an admin user.
        if not has_doi or auth.user == 'admin':
            print('deleting dataset')
            response = api.delete(f'datasets/{dataset_id}', auth=auth)
            assert response.status_code == 200, response.text

    finally:
        # Published uploads can only be removed by an admin user.
        if not published or auth.user == 'admin':
            print('delete the upload again')
            upload = api.delete(f'uploads/{upload["upload_id"]}', auth=auth).json()['data']
            assert get_upload(upload) is None