Commit 2c948619 authored by David Sikter

Moves first_page_url and adds functionality for generating the urls

parent a6baee28
Pipeline #97384 failed with stages in 25 minutes and 36 seconds
@@ -31,7 +31,7 @@ from nomad.utils import strip
 from nomad.metainfo import Datetime, MEnum
 from nomad.metainfo.search_extension import metrics, search_quantities, search_sub_sections
-from .utils import parameter_dependency_from_model
+from .utils import parameter_dependency_from_model, update_url_query_arguments

 User = datamodel.User.m_def.a_pydantic.model
@@ -581,6 +581,10 @@ class PaginationResponse(Pagination):
         None, description=strip('''
         The url to get the next page.
         '''))
+    first_page_url: Optional[str] = Field(
+        None, description=strip('''
+        The url to get the first page.
+        '''))

     @validator('order_by')
     def validate_order_by(cls, order_by):  # pylint: disable=no-self-argument
@@ -602,16 +606,48 @@ class PaginationResponse(Pagination):
         # No validation - behaviour of this field depends on api method
         return next_page_after_value

+    def populate_urls(self, request: Request):
+        '''
+        Populates the urls (`page_url`, `next_page_url`, `first_page_url`) from the
+        request and `next_page_after_value`.
+        '''
+        original_url = str(request.url)
+        self.page_url = original_url
+        self.first_page_url = update_url_query_arguments(
+            original_url, page=None, page_after_value=None)
+        if self.next_page_after_value:
+            self.next_page_url = update_url_query_arguments(
+                original_url, page=None, page_after_value=self.next_page_after_value)
+

 class IndexBasedPaginationResponse(PaginationResponse):
     prev_page_url: Optional[str] = Field(
         None, description=strip('''
         The url to get the previous page.
         '''))
-    first_page_url: Optional[str] = Field(
-        None, description=strip('''
-        The url to get the first page.
-        '''))
+
+    def populate_page_refs(self, request: Request):
+        '''
+        Provided that `page` and `total` are populated, populates all other references:
+        `page_after_value`, `next_page_after_value`, `page_url`, `next_page_url`,
+        `prev_page_url`, and `first_page_url`.
+        '''
+        has_more_pages = self.total > self.page * self.page_size
+        self.page_after_value = str((self.page - 1) * self.page_size - 1) if self.page > 1 else None
+        self.next_page_after_value = str(self.page * self.page_size - 1) if has_more_pages else None
+
+        original_url = str(request.url)
+        self.page_url = original_url
+        self.first_page_url = update_url_query_arguments(
+            original_url, page=None, page_after_value=None)
+        if has_more_pages:
+            self.next_page_url = update_url_query_arguments(
+                original_url, page=self.page + 1, page_after_value=None)
+        if self.page > 1:
+            self.prev_page_url = update_url_query_arguments(
+                original_url, page=self.page - 1, page_after_value=None)


 class EntryBasedPagination(Pagination):
...
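As a worked example of the index arithmetic in `populate_page_refs` above (the numbers and the request url here are hypothetical, not from the commit):

```python
# Hypothetical inputs: total=25 results, page=2, page_size=10.
total, page, page_size = 25, 2, 10

has_more_pages = total > page * page_size                                      # 25 > 20 -> True
page_after_value = str((page - 1) * page_size - 1) if page > 1 else None       # '9', last index of page 1
next_page_after_value = str(page * page_size - 1) if has_more_pages else None  # '19', last index of page 2
assert (page_after_value, next_page_after_value) == ('9', '19')

# For a request url of 'https://nomad.example/api/v1/datasets?page=2' (made up),
# the populated urls would be:
#   page_url       = 'https://nomad.example/api/v1/datasets?page=2'
#   first_page_url = 'https://nomad.example/api/v1/datasets'          (page unset)
#   next_page_url  = 'https://nomad.example/api/v1/datasets?page=3'
#   prev_page_url  = 'https://nomad.example/api/v1/datasets?page=1'
```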
@@ -18,7 +18,8 @@
 import re
 from typing import cast, Optional, List
-from fastapi import APIRouter, Depends, Query as FastApiQuery, Path, HTTPException, status
+from fastapi import (
+    APIRouter, Request, Depends, Query as FastApiQuery, Path, HTTPException, status)
 from pydantic import BaseModel, Field, validator
 from datetime import datetime
 import enum
@@ -121,6 +122,7 @@ class DatasetCreate(BaseModel):  # type: ignore
     response_model_exclude_unset=True,
     response_model_exclude_none=True)
 async def get_datasets(
+        request: Request,
         dataset_id: str = FastApiQuery(None),
         name: str = FastApiQuery(None),
         user_id: str = FastApiQuery(None),
@@ -143,10 +145,8 @@ async def get_datasets(
     start = (pagination.page - 1) * pagination.page_size
     end = start + pagination.page_size

-    pagination_response = IndexBasedPaginationResponse(
-        total=mongodb_query.count(),
-        next_page_after_value=str(end - 1) if pagination.page != 1 and end < mongodb_query.count() else None,
-        **pagination.dict())  # type: ignore
+    pagination_response = IndexBasedPaginationResponse(total=mongodb_query.count(), **pagination.dict())
+    pagination_response.populate_page_refs(request)

     return {
         'pagination': pagination_response,
...
@@ -17,7 +17,7 @@
 #
 from typing import Dict, Iterator, Any, List, Set, cast
-from fastapi import APIRouter, Depends, Path, status, HTTPException
+from fastapi import APIRouter, Request, Depends, Path, status, HTTPException
 from fastapi.responses import StreamingResponse
 import os.path
 import io
@@ -101,6 +101,7 @@ def perform_search(*args, **kwargs):
     response_model_exclude_unset=True,
     response_model_exclude_none=True)
 async def post_entries_metadata_query(
+        request: Request,
         data: EntriesMetadata,
         user: User = Depends(get_optional_user)):
@@ -121,7 +122,7 @@ async def post_entries_metadata_query(
     and aggregated data over all search results.
     '''

-    return perform_search(
+    res = perform_search(
         owner=data.owner,
         query=data.query,
         pagination=data.pagination,
@@ -129,6 +130,8 @@ async def post_entries_metadata_query(
         statistics=data.statistics,
         aggregations=data.aggregations,
         user_id=user.user_id if user is not None else None)
+    res.pagination.populate_urls(request)
+    return res


 @router.get(
@@ -139,6 +142,7 @@ async def post_entries_metadata_query(
     response_model_exclude_unset=True,
     response_model_exclude_none=True)
 async def get_entries_metadata(
+        request: Request,
         with_query: WithQuery = Depends(query_parameters),
         pagination: EntryPagination = Depends(entry_pagination_parameters),
         required: MetadataRequired = Depends(metadata_required_parameters),
@@ -155,10 +159,12 @@ async def get_entries_metadata(
     `gt`, `lt`, `lte`.
     '''

-    return perform_search(
+    res = perform_search(
         owner=with_query.owner, query=with_query.query,
         pagination=pagination, required=required,
         user_id=user.user_id if user is not None else None)
+    res.pagination.populate_urls(request)
+    return res


 def _do_exaustive_search(owner: Owner, query: Query, include: List[str], user: User) -> Iterator[Dict[str, Any]]:
@@ -349,10 +355,13 @@ _entries_raw_query_docstring = strip('''
     responses=create_responses(_bad_owner_response),
     response_model_exclude_unset=True,
     response_model_exclude_none=True)
-async def post_entries_raw_query(data: EntriesRaw, user: User = Depends(get_optional_user)):
+async def post_entries_raw_query(
+        request: Request, data: EntriesRaw, user: User = Depends(get_optional_user)):

-    return _answer_entries_raw_request(
+    res = _answer_entries_raw_request(
         owner=data.owner, query=data.query, pagination=data.pagination, user=user)
+    res.pagination.populate_urls(request)
+    return res


 @router.get(
@@ -365,12 +374,15 @@ async def post_entries_raw_query(data: EntriesRaw, user: User = Depends(get_opti
     response_model_exclude_none=True,
     responses=create_responses(_bad_owner_response))
 async def get_entries_raw(
+        request: Request,
         with_query: WithQuery = Depends(query_parameters),
         pagination: EntryPagination = Depends(entry_pagination_parameters),
         user: User = Depends(get_optional_user)):

-    return _answer_entries_raw_request(
+    res = _answer_entries_raw_request(
         owner=with_query.owner, query=with_query.query, pagination=pagination, user=user)
+    res.pagination.populate_urls(request)
+    return res


 _entries_raw_download_query_docstring = strip('''
@@ -527,11 +539,13 @@ _entries_archive_docstring = strip('''
     response_model_exclude_none=True,
     responses=create_responses(_bad_owner_response, _bad_archive_required_response))
 async def post_entries_archive_query(
-        data: EntriesArchive, user: User = Depends(get_optional_user)):
+        request: Request, data: EntriesArchive, user: User = Depends(get_optional_user)):

-    return _answer_entries_archive_request(
+    res = _answer_entries_archive_request(
         owner=data.owner, query=data.query, pagination=data.pagination,
         required=data.required, user=user)
+    res.pagination.populate_urls(request)
+    return res


 @router.get(
@@ -544,13 +558,16 @@ async def post_entries_archive_query(
     response_model_exclude_none=True,
     responses=create_responses(_bad_owner_response, _bad_archive_required_response))
 async def get_entries_archive_query(
+        request: Request,
         with_query: WithQuery = Depends(query_parameters),
         pagination: EntryPagination = Depends(entry_pagination_parameters),
         user: User = Depends(get_optional_user)):

-    return _answer_entries_archive_request(
+    res = _answer_entries_archive_request(
         owner=with_query.owner, query=with_query.query, pagination=pagination,
         required=None, user=user)
+    res.pagination.populate_urls(request)
+    return res


 def _answer_entries_archive_download_request(
...
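The point of populating `next_page_url` on every response is that clients can page through results without reimplementing the cursor logic. A rough client-side sketch, assuming the `requests` library, a hypothetical deployment url, and the `pagination`/`data` keys from the response models:

```python
import requests

# Hypothetical base url; '/entries' is the router wired up above.
url = 'https://nomad.example/api/v1/entries?page_size=100'
while url is not None:
    payload = requests.get(url).json()
    print(len(payload.get('data', [])), 'entries from', url)
    # populate_urls only sets next_page_url while next_page_after_value is set,
    # so the key disappears on the last page and the loop terminates.
    url = payload['pagination'].get('next_page_url')
```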
@@ -18,9 +18,10 @@
 from typing import Dict, Iterator, Any
 from types import FunctionType
+import urllib.parse
 import sys
 import inspect

-from fastapi import Query, HTTPException  # pylint: disable=unused-import
+from fastapi import Request, Query, HTTPException  # pylint: disable=unused-import
 from pydantic import ValidationError, BaseModel  # pylint: disable=unused-import
 import zipstream
@@ -117,3 +118,21 @@ def create_responses(*args):
     return {
         status_code: response
         for status_code, response in args}
+
+
+def update_url_query_arguments(original_url: str, **kwargs) -> str:
+    '''
+    Takes a url and returns a new url, obtained by updating the query arguments in the
+    `original_url` as specified by the kwargs.
+    '''
+    scheme, netloc, path, params, query, fragment = urllib.parse.urlparse(original_url)
+    query_dict = urllib.parse.parse_qs(query)
+    for k, v in kwargs.items():
+        if v is None:
+            # Unset the value
+            if k in query_dict:
+                query_dict.pop(k)
+        else:
+            query_dict[k] = [str(v)]
+    query = urllib.parse.urlencode(query_dict, doseq=True)
+    return urllib.parse.urlunparse((scheme, netloc, path, params, query, fragment))
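For reference, how the helper behaves on a made-up url; setting a kwarg to `None` drops that query argument, while any other value adds or replaces it:

```python
url = 'https://nomad.example/api/v1/entries?page=2&page_size=10'

update_url_query_arguments(url, page=3)
# -> 'https://nomad.example/api/v1/entries?page=3&page_size=10'

update_url_query_arguments(url, page=None, page_after_value='19')
# -> 'https://nomad.example/api/v1/entries?page_size=10&page_after_value=19'
```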