API query problems
I am trying to run a query with requests, but it fails whenever page_size is 40 or larger, or when I try to paginate. I am copying my code below in case I am writing the request incorrectly.
import requests
import json

base_url = 'https://nomad-lab.eu/dev/rae/perovskite-database/api/v1'

bandgaps = []
vocs = []
jscs = []
ffs = []
pces = []
formulas = []

# token holds my access token, obtained beforehand
data = requests.post(
    f'{base_url}/entries/archive/query',
    headers={'Authorization': f'Bearer {token}'},
    json={
        'owner': 'visible',
        'aggregations': {},
        'query': {
            'results.material.material_name:any': ['perovskite'],
            'results.properties.available_properties:all': ['optoelectronic.band_gap'],
            'authors.name:any': ['Pepe Marquez'],
            'results.material.elements:all': ['Sb'],
        },
        'required': {
            'data': '*',
            'results': {
                'material': {
                    'chemical_formula_reduced': '*',
                },
                'properties': {
                    'optoelectronic': {
                        'solar_cell': {
                            'open_circuit_voltage': '*',
                            'short_circuit_current_density': '*',
                            'fill_factor': '*',
                            'efficiency': '*',
                        },
                    },
                },
            },
        },
        'pagination': {'page_size': 39},
    },
).json()
for entry in data['data']:
    # Skip entries that do not have the quantities of interest.
    if 'data' not in entry['archive'].keys():
        continue
    elif 'perovskite' not in entry['archive']['data'].keys() or 'band_gap' not in entry['archive']['data']['perovskite'].keys():
        continue
    elif 'jv' not in entry['archive']['data'].keys() or 'default_Voc' not in entry['archive']['data']['jv'].keys():
        continue
    # elif 'results' not in entry['archive'].keys() or 'material' not in entry['archive']['results'].keys() or 'chemical_formula_reduced' not in entry['archive']['results']['material'].keys():
    #     continue
    else:
        bandgaps.append(float(entry['archive']['data']['perovskite']['band_gap']))
        solar_cell = entry['archive']['results']['properties']['optoelectronic']['solar_cell']
        vocs.append(solar_cell['open_circuit_voltage'])
        jscs.append(solar_cell['short_circuit_current_density'])
        ffs.append(solar_cell['fill_factor'])
        pces.append(solar_cell['efficiency'])
        formulas.append(entry['archive']['results']['material']['chemical_formula_reduced'])
This succeeds for 'pagination': {'page_size': 39} but fails for 'pagination': {'page_size': 40}. When it fails, it throws this error:
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-158-bb2b15c104e4> in <module>()
36 }).json()
37
---> 38 for entry in data['data']:
39 if 'data' not in entry['archive'].keys():
40 continue
KeyError: 'data'
When I inspect data, I see that the response is actually an error payload rather than query results:
{'detail': {'exception': "Can't pickle <function <lambda> at 0x7f542fe07b00>: attribute lookup <lambda> on nomad.metainfo.metainfo failed",
'exception_class': 'PicklingError',
'exception_traceback': 'Traceback (most recent call last):\n File "/usr/local/lib/python3.7/site-packages/starlette/middleware/errors.py", line 159, in __call__\n await self.app(scope, receive, _send)\n File "/usr/local/lib/python3.7/site-packages/starlette/middleware/cors.py", line 78, in __call__\n await self.app(scope, receive, send)\n File "/usr/local/lib/python3.7/site-packages/starlette/exceptions.py", line 82, in __call__\n raise exc from None\n File "/usr/local/lib/python3.7/site-packages/starlette/exceptions.py", line 71, in __call__\n await self.app(scope, receive, sender)\n File "/usr/local/lib/python3.7/site-packages/starlette/routing.py", line 566, in __call__\n await route.handle(scope, receive, send)\n File "/usr/local/lib/python3.7/site-packages/starlette/routing.py", line 227, in handle\n await self.app(scope, receive, send)\n File "/usr/local/lib/python3.7/site-packages/starlette/routing.py", line 41, in app\n response = await func(request)\n File "/usr/local/lib/python3.7/site-packages/fastapi/routing.py", line 202, in app\n dependant=dependant, values=values, is_coroutine=is_coroutine\n File "/usr/local/lib/python3.7/site-packages/fastapi/routing.py", line 148, in run_endpoint_function\n return await dependant.call(**values)\n File "./nomad/app/v1/routers/entries.py", line 809, in post_entries_archive_query\n required=data.required, user=user)\n File "./nomad/app/v1/routers/entries.py", line 769, in _answer_entries_archive_request\n zip_longest(*[iter(search_response.data)] * entries_per_process))\n File "/usr/local/lib/python3.7/multiprocessing/pool.py", line 268, in map\n return self._map_async(func, iterable, mapstar, chunksize).get()\n File "/usr/local/lib/python3.7/multiprocessing/pool.py", line 657, in get\n raise self._value\n File "/usr/local/lib/python3.7/multiprocessing/pool.py", line 431, in _handle_tasks\n put(task)\n File "/usr/local/lib/python3.7/multiprocessing/connection.py", line 206, in send\n self._send_bytes(_ForkingPickler.dumps(obj))\n File "/usr/local/lib/python3.7/multiprocessing/reduction.py", line 51, in dumps\n cls(buf, protocol).dump(obj)\n_pickle.PicklingError: Can\'t pickle <function <lambda> at 0x7f542fe07b00>: attribute lookup <lambda> on nomad.metainfo.metainfo failed\n',
 'reason': 'Unexpected exception while handling your request'}}
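As a stop-gap on my side, something like the guard below at least surfaces the actual API error instead of the opaque KeyError. It is only a client-side sketch and obviously does not fix the server-side problem; query_body here stands for the same JSON payload shown above.

response = requests.post(
    f'{base_url}/entries/archive/query',
    headers={'Authorization': f'Bearer {token}'},
    json=query_body,  # query_body: the same JSON payload shown above
)
data = response.json()
if 'data' not in data:
    # The API returned an error payload instead of query results.
    raise RuntimeError(f"Query failed ({response.status_code}): {data.get('detail')}")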
@thchang, any idea whether this could be a bug?
@himanel1, would you be so kind as to help clarify the issue in case I have misunderstood or described something incorrectly?
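PS: for reference, this is roughly how I intend to paginate through all matching entries once larger pages work. It is only a sketch, assuming the request accepts page_after_value and the response's pagination object exposes next_page_after_value as in the v1 API docs; query_body again stands for the query payload shown above.

all_entries = []
page_after_value = None
while True:
    pagination = {'page_size': 20}
    if page_after_value is not None:
        pagination['page_after_value'] = page_after_value
    page = requests.post(
        f'{base_url}/entries/archive/query',
        headers={'Authorization': f'Bearer {token}'},
        json={**query_body, 'pagination': pagination},
    ).json()
    all_entries.extend(page.get('data', []))
    # Stop when the server no longer returns a cursor for the next page.
    page_after_value = page.get('pagination', {}).get('next_page_after_value')
    if not page_after_value:
        break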