Commit 0dd32b91 authored by Markus Scheidgen's avatar Markus Scheidgen
Browse files

Make app_fastapi a proper python package and fix linting issues.

parent 85cf733c
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
# limitations under the License. # limitations under the License.
# #
from typing import List, Dict, Optional, Union, Any from typing import List, Dict, Optional, Union, Any, Mapping
import enum import enum
from fastapi import Body, Request, HTTPException, Query as FastApiQuery from fastapi import Body, Request, HTTPException, Query as FastApiQuery
import pydantic import pydantic
...@@ -139,7 +139,7 @@ ops = { ...@@ -139,7 +139,7 @@ ops = {
QueryParameterValue = Union[Value, List[Value], Lte, Lt, Gte, Gt, Any_, All, None_] QueryParameterValue = Union[Value, List[Value], Lte, Lt, Gte, Gt, Any_, All, None_]
Query = Union[ Query = Union[
Dict[str, QueryParameterValue], And, Or, Not] Mapping[str, QueryParameterValue], And, Or, Not]
And.update_forward_refs() And.update_forward_refs()
......
import os from typing import Tuple, List, Union, Dict, Set
from typing import Tuple, List, Union, Dict, Any, Set
import mongomock
import pymongo.collection
from fastapi import HTTPException from fastapi import HTTPException
from elasticsearch_dsl import Search, Q from elasticsearch_dsl import Search, Q
from urllib.parse import urlparse
from optimade.filterparser import LarkParser from optimade.filterparser import LarkParser
from optimade.filtertransformers.elasticsearch import ElasticTransformer
from optimade.models import EntryResource
from optimade.server.config import CONFIG
from optimade.server.entry_collections import EntryCollection from optimade.server.entry_collections import EntryCollection
from optimade.server.logger import LOGGER
from optimade.server.mappers import BaseResourceMapper
from optimade.server.query_params import EntryListingQueryParams, SingleEntryQueryParams from optimade.server.query_params import EntryListingQueryParams, SingleEntryQueryParams
from optimade.server.exceptions import BadRequest from optimade.server.exceptions import BadRequest
from optimade.server.mappers import StructureMapper from optimade.server.mappers import StructureMapper
from optimade.models import StructureResource from optimade.models import StructureResource
from nomad import config, datamodel, files, search, utils from nomad import config, datamodel, files, search, utils
from nomad.normalizing.optimade import ( from nomad.normalizing.optimade import (
optimade_chemical_formula_reduced, optimade_chemical_formula_anonymous, optimade_chemical_formula_reduced, optimade_chemical_formula_anonymous,
...@@ -94,7 +82,8 @@ class ElasticsearchStructureCollection(EntryCollection): ...@@ -94,7 +82,8 @@ class ElasticsearchStructureCollection(EntryCollection):
logger.error('could not parse optimade filter', filter=filter_param) logger.error('could not parse optimade filter', filter=filter_param)
raise NotImplementedError( raise NotImplementedError(
'some features used in filter query %s are not implemented' % filter_param) 'some features used in filter query %s are not implemented' % filter_param)
elif filter != {}:
if filter != {}:
search_request.query(filter) search_request.query(filter)
es_response = search_request.execute_paginated( es_response = search_request.execute_paginated(
......
import logging
from nomad import utils from nomad import utils
LOGGER = utils.get_logger('optimade') LOGGER = utils.get_logger('optimade')
...@@ -25,14 +25,13 @@ import enum ...@@ -25,14 +25,13 @@ import enum
from nomad import utils, datamodel from nomad import utils, datamodel
from nomad.utils import strip, create_uuid from nomad.utils import strip, create_uuid
from nomad.datamodel import Dataset as DatasetDefinitionCls from nomad.datamodel import Dataset as DatasetDefinitionCls
from nomad.search import search
from nomad.doi import DOI from nomad.doi import DOI
from nomad.app_fastapi.routers.auth import get_required_user from nomad.app_fastapi.routers.auth import get_required_user
from nomad.app_fastapi.utils import create_responses from nomad.app_fastapi.utils import create_responses
from nomad.app_fastapi.models import ( from nomad.app_fastapi.models import (
pagination_parameters, Pagination, PaginationResponse, Query, pagination_parameters, Pagination, PaginationResponse, Query,
HTTPExceptionModel, User, Direction, MetadataRequired) HTTPExceptionModel, User, Direction, Owner)
from .entries import _do_exaustive_search from .entries import _do_exaustive_search
...@@ -212,7 +211,7 @@ async def post_datasets( ...@@ -212,7 +211,7 @@ async def post_datasets(
# get all entry ids # get all entry ids
if create.query is not None: if create.query is not None:
entries = _do_exaustive_search( entries = _do_exaustive_search(
owner='public', query=create.query, user=user, owner=Owner.public, query=create.query, user=user,
include=['calc_id']) include=['calc_id'])
dataset.entries = [entry['calc_id'] for entry in entries] dataset.entries = [entry['calc_id'] for entry in entries]
elif create.entries is not None: elif create.entries is not None:
......
...@@ -737,7 +737,7 @@ async def get_entry_raw_download( ...@@ -737,7 +737,7 @@ async def get_entry_raw_download(
status_code=status.HTTP_404_NOT_FOUND, status_code=status.HTTP_404_NOT_FOUND,
detail='The entry with the given id does not exist or is not visible to you.') detail='The entry with the given id does not exist or is not visible to you.')
return _answer_entries_raw_download_request(owner='public', query=query, files=files, user=user) return _answer_entries_raw_download_request(owner=Owner.public, query=query, files=files, user=user)
@router.get( @router.get(
......
...@@ -595,7 +595,7 @@ def query_archive( ...@@ -595,7 +595,7 @@ def query_archive(
def filter_archive( def filter_archive(
required: Dict[str, Any], archive_item: Union[Dict, ArchiveObject], required: Union[str, Dict[str, Any]], archive_item: Union[Dict, ArchiveObject],
transform: Callable) -> Dict: transform: Callable) -> Dict:
def _fix_index(index, length): def _fix_index(index, length):
......
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
This module represents calculations in elastic search. This module represents calculations in elastic search.
''' '''
from typing import Iterable, Dict, List, Any from typing import cast, Iterable, Dict, List, Any
from elasticsearch_dsl import Search, Q, A, analyzer, tokenizer from elasticsearch_dsl import Search, Q, A, analyzer, tokenizer
import elasticsearch.helpers import elasticsearch.helpers
from elasticsearch.exceptions import NotFoundError, RequestError from elasticsearch.exceptions import NotFoundError, RequestError
...@@ -1110,15 +1110,15 @@ def search( ...@@ -1110,15 +1110,15 @@ def search(
# statistics # statistics
if len(statistics) > 0: if len(statistics) > 0:
more_response_data['statistics'] = { more_response_data['statistics'] = cast(Dict[str, Any], {
name: _es_to_api_statistics(es_response, name, statistic) name: _es_to_api_statistics(es_response, name, statistic)
for name, statistic in statistics.items()} for name, statistic in statistics.items()})
# aggregations # aggregations
if len(aggregations) > 0: if len(aggregations) > 0:
more_response_data['aggregations'] = { more_response_data['aggregations'] = cast(Dict[str, Any], {
name: _es_to_api_aggregation(es_response, name, aggregation) name: _es_to_api_aggregation(es_response, name, aggregation)
for name, aggregation in aggregations.items()} for name, aggregation in aggregations.items()})
more_response_data['es_query'] = es_query.to_dict() more_response_data['es_query'] = es_query.to_dict()
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment