Skip to content

Commit

Permalink
refactor(routes): move query helper functions to utils
Browse files Browse the repository at this point in the history
  • Loading branch information
gtempus committed Apr 30, 2024
1 parent 97d02bb commit bc0e545
Show file tree
Hide file tree
Showing 4 changed files with 85 additions and 53 deletions.
2 changes: 1 addition & 1 deletion app/routes/datasets/downloads.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
from ...utils.geostore import get_geostore
from ...utils.path import split_s3_path
from .. import dataset_version_dependency
from .queries import _query_dataset_csv, _query_dataset_json
from ..utils.downloads import _query_dataset_csv, _query_dataset_json

router: APIRouter = APIRouter()

Expand Down
54 changes: 2 additions & 52 deletions app/routes/datasets/queries.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import csv
import re
from io import StringIO
from typing import Any, Dict, List, Optional, Tuple, Union, cast
from typing import Any, Dict, List, Optional, Tuple, Union
from urllib.parse import unquote
from uuid import UUID, uuid4

Expand All @@ -25,7 +25,6 @@
from ...authentication.token import is_gfwpro_admin_for_query
from ...application import db
# from ...authentication.api_keys import get_api_key
from ...crud import assets
from ...models.enum.assets import AssetType
from ...models.enum.creation_options import Delimiters
from ...models.enum.geostore import GeostoreOrigin
Expand Down Expand Up @@ -76,6 +75,7 @@
from ...utils.aws import invoke_lambda
from ...utils.geostore import get_geostore
from .. import dataset_version_dependency
from ..utils.downloads import _query_dataset_csv, _query_dataset_json

router = APIRouter()

Expand Down Expand Up @@ -305,56 +305,6 @@ async def query_dataset_csv_post(
return CSVStreamingResponse(iter([csv_data.getvalue()]), download=False)


async def _query_dataset_json(
    dataset: str,
    version: str,
    sql: str,
    geostore: Optional[GeostoreCommon],
) -> List[Dict[str, Any]]:
    """Execute *sql* against the dataset/version's default asset and return rows as dicts.

    Dispatches on the default asset's query type: table-backed assets go
    through ``_query_table``; raster assets go through ``_query_raster`` and
    return the ``"data"`` payload of its result. Any other asset type raises
    ``HTTPException`` with status 501.
    """
    # Make sure we can query the dataset
    default_asset: AssetORM = await assets.get_default_asset(dataset, version)
    query_type = _get_query_type(default_asset, geostore)
    if query_type == QueryType.table:
        # Table queries may run without a geostore; pass its GeoJSON when present.
        geometry = geostore.geojson if geostore else None
        return await _query_table(dataset, version, sql, geometry)
    elif query_type == QueryType.raster:
        # Narrow Optional[GeostoreCommon] — assumes raster query type is only
        # ever selected when a geostore is present; TODO confirm in _get_query_type.
        geostore = cast(GeostoreCommon, geostore)
        results = await _query_raster(dataset, default_asset, sql, geostore)
        return results["data"]
    else:
        raise HTTPException(
            status_code=501,
            detail="This endpoint is not implemented for the given dataset.",
        )


async def _query_dataset_csv(
    dataset: str,
    version: str,
    sql: str,
    geostore: Optional[GeostoreCommon],
    delimiter: Delimiters = Delimiters.comma,
) -> StringIO:
    """Execute *sql* against the dataset/version's default asset and return CSV in a StringIO.

    Table-backed assets are queried via ``_query_table`` and serialized with
    ``_orm_to_csv``; raster assets are queried via ``_query_raster`` in CSV
    format and their ``"data"`` payload is wrapped in a ``StringIO``. Any other
    asset type raises ``HTTPException`` with status 501.
    """
    # Make sure we can query the dataset
    default_asset: AssetORM = await assets.get_default_asset(dataset, version)
    query_type = _get_query_type(default_asset, geostore)
    if query_type == QueryType.table:
        # Table queries may run without a geostore; pass its GeoJSON when present.
        geometry = geostore.geojson if geostore else None
        response = await _query_table(dataset, version, sql, geometry)
        return _orm_to_csv(response, delimiter=delimiter)
    elif query_type == QueryType.raster:
        # Narrow Optional[GeostoreCommon] — assumes raster query type is only
        # ever selected when a geostore is present; TODO confirm in _get_query_type.
        geostore = cast(GeostoreCommon, geostore)
        results = await _query_raster(
            dataset, default_asset, sql, geostore, QueryFormat.csv, delimiter
        )
        return StringIO(results["data"])
    else:
        raise HTTPException(
            status_code=501,
            detail="This endpoint is not implemented for the given dataset.",
        )


def _get_query_type(default_asset: AssetORM, geostore: Optional[GeostoreCommon]):
if default_asset.asset_type in [
AssetType.geo_database_table,
Expand Down
Empty file added app/routes/utils/__init__.py
Empty file.
82 changes: 82 additions & 0 deletions app/routes/utils/queries.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
import csv
from io import StringIO
from typing import Any, Dict, List, Optional, cast

from fastapi import HTTPException

from ...crud import assets
from ...models.enum.creation_options import Delimiters
from ...models.enum.queries import QueryFormat, QueryType
from ...models.orm.assets import Asset as AssetORM
from ...models.pydantic.geostore import GeostoreCommon
from ..datasets.queries import _get_query_type, _query_raster, _query_table


async def _query_dataset_json(
    dataset: str,
    version: str,
    sql: str,
    geostore: Optional[GeostoreCommon],
) -> List[Dict[str, Any]]:
    """Run *sql* against the dataset/version's default asset and return rows as dicts.

    Table-backed assets are served by ``_query_table``; raster assets by
    ``_query_raster`` (the ``"data"`` payload of its result). Any other asset
    type raises ``HTTPException(501)``.

    NOTE(review): this module imports helpers from ``..datasets.queries``
    while ``datasets/queries.py`` imports from ``..utils.downloads`` — confirm
    the intended module path and that no circular import results.
    """
    asset: AssetORM = await assets.get_default_asset(dataset, version)
    query_type = _get_query_type(asset, geostore)

    if query_type == QueryType.table:
        # Table queries may run without a geostore; forward its GeoJSON when present.
        return await _query_table(
            dataset, version, sql, geostore.geojson if geostore else None
        )

    if query_type == QueryType.raster:
        # Narrow Optional — assumes raster is only selected with a geostore present.
        raster_result = await _query_raster(
            dataset, asset, sql, cast(GeostoreCommon, geostore)
        )
        return raster_result["data"]

    raise HTTPException(
        status_code=501,
        detail="This endpoint is not implemented for the given dataset.",
    )


async def _query_dataset_csv(
    dataset: str,
    version: str,
    sql: str,
    geostore: Optional[GeostoreCommon],
    delimiter: Delimiters = Delimiters.comma,
) -> StringIO:
    """Run *sql* against the dataset/version's default asset and return CSV in a StringIO.

    Table-backed assets are queried via ``_query_table`` and serialized with
    ``_orm_to_csv``; raster assets are queried via ``_query_raster`` in CSV
    format and their ``"data"`` payload wrapped in a ``StringIO``. Any other
    asset type raises ``HTTPException(501)``.
    """
    asset: AssetORM = await assets.get_default_asset(dataset, version)
    query_type = _get_query_type(asset, geostore)

    if query_type == QueryType.table:
        # Table queries may run without a geostore; forward its GeoJSON when present.
        rows = await _query_table(
            dataset, version, sql, geostore.geojson if geostore else None
        )
        return _orm_to_csv(rows, delimiter=delimiter)

    if query_type == QueryType.raster:
        # Narrow Optional — assumes raster is only selected with a geostore present.
        raster_result = await _query_raster(
            dataset, asset, sql, cast(GeostoreCommon, geostore), QueryFormat.csv, delimiter
        )
        return StringIO(raster_result["data"])

    raise HTTPException(
        status_code=501,
        detail="This endpoint is not implemented for the given dataset.",
    )


def _orm_to_csv(
    data: List[Dict[str, Any]], delimiter: Delimiters = Delimiters.comma
) -> StringIO:
    """Serialize query result rows into an in-memory CSV buffer.

    The first row's keys become the header line; all values are written with
    ``csv.QUOTE_NONNUMERIC`` quoting. The buffer is rewound to the start so
    callers can read or stream it immediately. An empty *data* list yields an
    empty buffer.
    """
    buffer = StringIO()

    if data:
        writer = csv.writer(
            buffer, quoting=csv.QUOTE_NONNUMERIC, delimiter=delimiter
        )
        writer.writerow(data[0].keys())
        writer.writerows(row.values() for row in data)
        buffer.seek(0)

    return buffer

0 comments on commit bc0e545

Please sign in to comment.