Commit 2c948619 authored by David Sikter

Moves first_page_url and adds functionality for generating the urls

parent a6baee28
Pipeline #97384 failed with stages in 25 minutes and 36 seconds
@@ -31,7 +31,7 @@ from nomad.utils import strip
 from nomad.metainfo import Datetime, MEnum
 from nomad.metainfo.search_extension import metrics, search_quantities, search_sub_sections
-from .utils import parameter_dependency_from_model
+from .utils import parameter_dependency_from_model, update_url_query_arguments
 User = datamodel.User.m_def.a_pydantic.model
@@ -581,6 +581,10 @@ class PaginationResponse(Pagination):
         None, description=strip('''
         The url to get the next page.
         '''))
+    first_page_url: Optional[str] = Field(
+        None, description=strip('''
+        The url to get the first page.
+        '''))
     @validator('order_by')
     def validate_order_by(cls, order_by):  # pylint: disable=no-self-argument
@@ -602,16 +606,48 @@ class PaginationResponse(Pagination):
         # No validation - behaviour of this field depends on api method
         return next_page_after_value
+
+    def populate_urls(self, request: Request):
+        '''
+        Populates the urls (`page_url`, `next_page_url`, `first_page_url`) from the
+        request and `next_page_after_value`.
+        '''
+        original_url = str(request.url)
+        self.page_url = original_url
+        self.first_page_url = update_url_query_arguments(
+            original_url, page=None, page_after_value=None)
+        if self.next_page_after_value:
+            self.next_page_url = update_url_query_arguments(
+                original_url, page=None, page_after_value=self.next_page_after_value)
 
 class IndexBasedPaginationResponse(PaginationResponse):
     prev_page_url: Optional[str] = Field(
         None, description=strip('''
         The url to get the previous page.
         '''))
-    first_page_url: Optional[str] = Field(
-        None, description=strip('''
-        The url to get the first page.
-        '''))
+
+    def populate_page_refs(self, request: Request):
+        '''
+        Provided that `page` and `total` are populated, populates all other references:
+        `page_after_value`, `next_page_after_value`, `page_url`, `next_page_url`,
+        `prev_page_url`, and `first_page_url`.
+        '''
+        has_more_pages = self.total > self.page * self.page_size
+        self.page_after_value = str((self.page - 1) * self.page_size - 1) if self.page > 1 else None
+        self.next_page_after_value = str(self.page * self.page_size - 1) if has_more_pages else None
+        original_url = str(request.url)
+        self.page_url = original_url
+        self.first_page_url = update_url_query_arguments(
+            original_url, page=None, page_after_value=None)
+        if has_more_pages:
+            self.next_page_url = update_url_query_arguments(
+                original_url, page=self.page + 1, page_after_value=None)
+        if self.page > 1:
+            self.prev_page_url = update_url_query_arguments(
+                original_url, page=self.page - 1, page_after_value=None)
 
 class EntryBasedPagination(Pagination):
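To make the offset arithmetic in `populate_page_refs` concrete, here is a small worked example (the request URL and counts are made up; only the formulas come from the diff above):

# Worked example of the populate_page_refs arithmetic (made-up values).
# Suppose the client requested page 2 of
#   https://nomad.example.com/api/v1/datasets?page=2&page_size=10
# and the query matched 23 datasets in total.
total, page, page_size = 23, 2, 10

has_more_pages = total > page * page_size            # 23 > 20 -> True
page_after_value = str((page - 1) * page_size - 1)   # '9':  zero-based index of the last item before this page
next_page_after_value = str(page * page_size - 1)    # '19': zero-based index of the last item on this page

# update_url_query_arguments then yields:
#   first_page_url: .../datasets?page_size=10         (page and page_after_value unset)
#   next_page_url:  .../datasets?page=3&page_size=10  (since has_more_pages)
#   prev_page_url:  .../datasets?page=1&page_size=10  (since page > 1)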
@@ -18,7 +18,8 @@
 import re
 from typing import cast, Optional, List
-from fastapi import APIRouter, Depends, Query as FastApiQuery, Path, HTTPException, status
+from fastapi import (
+    APIRouter, Request, Depends, Query as FastApiQuery, Path, HTTPException, status)
 from pydantic import BaseModel, Field, validator
 from datetime import datetime
 import enum
@@ -121,6 +122,7 @@ class DatasetCreate(BaseModel):  # type: ignore
     response_model_exclude_unset=True,
     response_model_exclude_none=True)
 async def get_datasets(
+        request: Request,
         dataset_id: str = FastApiQuery(None),
         name: str = FastApiQuery(None),
         user_id: str = FastApiQuery(None),
@@ -143,10 +145,8 @@ async def get_datasets(
     start = (pagination.page - 1) * pagination.page_size
     end = start + pagination.page_size
-    pagination_response = IndexBasedPaginationResponse(
-        total=mongodb_query.count(),
-        next_page_after_value=str(end - 1) if pagination.page != 1 and end < mongodb_query.count() else None,
-        **pagination.dict())  # type: ignore
+    pagination_response = IndexBasedPaginationResponse(total=mongodb_query.count(), **pagination.dict())
+    pagination_response.populate_page_refs(request)
     return {
         'pagination': pagination_response,
@@ -17,7 +17,7 @@
 #
 from typing import Dict, Iterator, Any, List, Set, cast
-from fastapi import APIRouter, Depends, Path, status, HTTPException
+from fastapi import APIRouter, Request, Depends, Path, status, HTTPException
 from fastapi.responses import StreamingResponse
 import os.path
 import io
@@ -101,6 +101,7 @@ def perform_search(*args, **kwargs):
     response_model_exclude_unset=True,
     response_model_exclude_none=True)
 async def post_entries_metadata_query(
+        request: Request,
         data: EntriesMetadata,
         user: User = Depends(get_optional_user)):
@@ -121,7 +122,7 @@ async def post_entries_metadata_query(
     and aggregated data over all search results.
     '''
-    return perform_search(
+    res = perform_search(
         owner=data.owner,
         query=data.query,
         pagination=data.pagination,
@@ -129,6 +130,8 @@ async def post_entries_metadata_query(
         statistics=data.statistics,
         aggregations=data.aggregations,
         user_id=user.user_id if user is not None else None)
+    res.pagination.populate_urls(request)
+    return res
 
 @router.get(
@@ -139,6 +142,7 @@ async def post_entries_metadata_query(
     response_model_exclude_unset=True,
     response_model_exclude_none=True)
 async def get_entries_metadata(
+        request: Request,
         with_query: WithQuery = Depends(query_parameters),
         pagination: EntryPagination = Depends(entry_pagination_parameters),
         required: MetadataRequired = Depends(metadata_required_parameters),
@@ -155,10 +159,12 @@ async def get_entries_metadata(
     `gt`, `lt`, `lte`.
     '''
-    return perform_search(
+    res = perform_search(
         owner=with_query.owner, query=with_query.query,
         pagination=pagination, required=required,
         user_id=user.user_id if user is not None else None)
+    res.pagination.populate_urls(request)
+    return res
 
 def _do_exaustive_search(owner: Owner, query: Query, include: List[str], user: User) -> Iterator[Dict[str, Any]]:
@@ -349,10 +355,13 @@ _entries_raw_query_docstring = strip('''
     responses=create_responses(_bad_owner_response),
     response_model_exclude_unset=True,
     response_model_exclude_none=True)
-async def post_entries_raw_query(data: EntriesRaw, user: User = Depends(get_optional_user)):
+async def post_entries_raw_query(
+        request: Request, data: EntriesRaw, user: User = Depends(get_optional_user)):
-    return _answer_entries_raw_request(
+    res = _answer_entries_raw_request(
         owner=data.owner, query=data.query, pagination=data.pagination, user=user)
+    res.pagination.populate_urls(request)
+    return res
 
 @router.get(
@@ -365,12 +374,15 @@ async def post_entries_raw_query(
     response_model_exclude_none=True,
     responses=create_responses(_bad_owner_response))
 async def get_entries_raw(
+        request: Request,
         with_query: WithQuery = Depends(query_parameters),
         pagination: EntryPagination = Depends(entry_pagination_parameters),
         user: User = Depends(get_optional_user)):
-    return _answer_entries_raw_request(
+    res = _answer_entries_raw_request(
         owner=with_query.owner, query=with_query.query, pagination=pagination, user=user)
+    res.pagination.populate_urls(request)
+    return res
 
 _entries_raw_download_query_docstring = strip('''
@@ -527,11 +539,13 @@ _entries_archive_docstring = strip('''
     response_model_exclude_none=True,
     responses=create_responses(_bad_owner_response, _bad_archive_required_response))
 async def post_entries_archive_query(
-        data: EntriesArchive, user: User = Depends(get_optional_user)):
+        request: Request, data: EntriesArchive, user: User = Depends(get_optional_user)):
-    return _answer_entries_archive_request(
+    res = _answer_entries_archive_request(
         owner=data.owner, query=data.query, pagination=data.pagination,
         required=data.required, user=user)
+    res.pagination.populate_urls(request)
+    return res
 
 @router.get(
@@ -544,13 +558,16 @@ async def post_entries_archive_query(
     response_model_exclude_none=True,
     responses=create_responses(_bad_owner_response, _bad_archive_required_response))
 async def get_entries_archive_query(
+        request: Request,
         with_query: WithQuery = Depends(query_parameters),
         pagination: EntryPagination = Depends(entry_pagination_parameters),
         user: User = Depends(get_optional_user)):
-    return _answer_entries_archive_request(
+    res = _answer_entries_archive_request(
         owner=with_query.owner, query=with_query.query, pagination=pagination,
         required=None, user=user)
+    res.pagination.populate_urls(request)
+    return res
 
 def _answer_entries_archive_download_request(
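Every endpoint change in this file follows the same three-step pattern: inject the `Request`, capture the search result instead of returning it directly, and let the pagination model derive its URLs from the live request. A minimal self-contained sketch of that pattern follows; the `ToyPagination` model and `/entries` route are hypothetical stand-ins, not NOMAD code:

from typing import Optional

from fastapi import FastAPI, Request
from pydantic import BaseModel


class ToyPagination(BaseModel):
    # Hypothetical stand-in for nomad's PaginationResponse.
    page_url: Optional[str] = None
    first_page_url: Optional[str] = None

    def populate_urls(self, request: Request):
        # The page the client just requested.
        self.page_url = str(request.url)
        # NOMAD uses update_url_query_arguments for this; starlette's
        # URL.remove_query_params achieves the same for this simple case.
        self.first_page_url = str(request.url.remove_query_params(['page']))


app = FastAPI()


@app.get('/entries')
async def get_entries(request: Request):
    pagination = ToyPagination()
    pagination.populate_urls(request)  # URLs derive from the live request
    return {'pagination': pagination, 'data': []}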
@@ -18,9 +18,10 @@
 from typing import Dict, Iterator, Any
 from types import FunctionType
 import urllib
 import sys
 import inspect
-from fastapi import Query, HTTPException  # pylint: disable=unused-import
+from fastapi import Request, Query, HTTPException  # pylint: disable=unused-import
 from pydantic import ValidationError, BaseModel  # pylint: disable=unused-import
 import zipstream
@@ -117,3 +118,21 @@ def create_responses(*args):
     return {
         status_code: response
         for status_code, response in args}
+
+
+def update_url_query_arguments(original_url: str, **kwargs) -> str:
+    '''
+    Takes a url and returns a new url, obtained by updating the query arguments in the
+    `original_url` as specified by the kwargs.
+    '''
+    scheme, netloc, path, params, query, fragment = urllib.parse.urlparse(original_url)
+    query_dict = urllib.parse.parse_qs(query)
+    for k, v in kwargs.items():
+        if v is None:
+            # Unset the value
+            if k in query_dict:
+                query_dict.pop(k)
+        else:
+            query_dict[k] = [str(v)]
+    query = urllib.parse.urlencode(query_dict, doseq=True)
+    return urllib.parse.urlunparse((scheme, netloc, path, params, query, fragment))
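This is exactly the transformation `populate_urls` relies on to derive `first_page_url` and `next_page_url`. For illustration, the helper behaves as follows (the example URL is made up; argument order in the output follows `urllib.parse.parse_qs` insertion order):

url = 'https://nomad.example.com/api/v1/entries?page_after_value=9&page_size=10'

# Jump to the first page: unset both paging cursors.
update_url_query_arguments(url, page=None, page_after_value=None)
# -> 'https://nomad.example.com/api/v1/entries?page_size=10'

# Advance the cursor: existing values are overwritten, unrelated ones kept.
update_url_query_arguments(url, page_after_value='19')
# -> 'https://nomad.example.com/api/v1/entries?page_after_value=19&page_size=10'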