Skip to content

Commit

Permalink
Connect with raster analysis step function
Browse files Browse the repository at this point in the history
  • Loading branch information
jterry64 committed Jul 30, 2024
1 parent 6e2c1e6 commit 3d0498d
Show file tree
Hide file tree
Showing 9 changed files with 25 additions and 21 deletions.
1 change: 1 addition & 0 deletions app/models/pydantic/query.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ class QueryRequestIn(StrictBaseModel):
class QueryBatchRequestIn(StrictBaseModel):
feature_collection: Optional[FeatureCollection]
uri: Optional[str]
id_field: str
sql: str


Expand Down
11 changes: 5 additions & 6 deletions app/routes/datasets/queries.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
from fastapi.logger import logger

# from fastapi.openapi.models import APIKey
from fastapi.openapi.models import APIKey
from fastapi.responses import ORJSONResponse, RedirectResponse
from pglast import printers # noqa
from pglast import Node, parse_sql
Expand All @@ -29,7 +28,6 @@
from pydantic.tools import parse_obj_as

from ...application import db
from ...authentication.api_keys import get_api_key
from ...authentication.token import is_gfwpro_admin_for_query

# from ...authentication.api_keys import get_api_key
Expand Down Expand Up @@ -88,7 +86,7 @@
from ...settings.globals import (
GEOSTORE_SIZE_LIMIT_OTF,
RASTER_ANALYSIS_LAMBDA_NAME,
STATE_MACHINE_ARN,
RASTER_ANALYSIS_STATE_MACHINE_ARN,
)
from ...utils.aws import get_sfn_client, invoke_lambda
from ...utils.geostore import get_geostore
Expand Down Expand Up @@ -339,7 +337,7 @@ async def query_dataset_list_post(
*,
dataset_version: Tuple[str, str] = Depends(dataset_version_dependency),
request: QueryBatchRequestIn,
api_key: APIKey = Depends(get_api_key),
# api_key: APIKey = Depends(get_api_key),
):
"""Execute a READ-ONLY SQL query on the given dataset version (if
implemented)."""
Expand Down Expand Up @@ -376,6 +374,7 @@ async def query_dataset_list_post(

input = {
"query": sql,
"id_field": request.id_field,
"environment": data_environment.dict()["layers"],
}

Expand All @@ -401,8 +400,8 @@ async def query_dataset_list_post(

async def _start_batch_execution(job_id: UUID, input: Dict[str, Any]) -> None:
get_sfn_client().start_execution(
stateMachineArn=STATE_MACHINE_ARN,
name=job_id,
stateMachineArn=RASTER_ANALYSIS_STATE_MACHINE_ARN,
name=str(job_id),
input=json.dumps(input),
)

Expand Down
6 changes: 2 additions & 4 deletions app/routes/jobs/job.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from fastapi.responses import ORJSONResponse

from ...models.pydantic.user_job import UserJob, UserJobResponse
from ...settings.globals import STATE_MACHINE_ARN
from ...settings.globals import RASTER_ANALYSIS_STATE_MACHINE_ARN
from ...utils.aws import get_sfn_client

router = APIRouter()
Expand Down Expand Up @@ -63,9 +63,7 @@ async def _get_user_job(job_id: UUID) -> UserJob:


async def _get_sfn_execution(job_id: UUID) -> Dict[str, Any]:
execution_arn = (
f"{STATE_MACHINE_ARN.replace('stateMachines', 'execution')}:{job_id}"
)
execution_arn = f"{RASTER_ANALYSIS_STATE_MACHINE_ARN.replace('stateMachines', 'execution')}:{str(job_id)}"
execution = get_sfn_client().describe_execution(execution_arn)
return execution

Expand Down
5 changes: 3 additions & 2 deletions app/settings/globals.py
Original file line number Diff line number Diff line change
Expand Up @@ -191,5 +191,6 @@
# commercial datasets which shouldn't be downloaded in any way.)
PROTECTED_QUERY_DATASETS = ["wdpa_licensed_protected_areas"]

# TODO mocking util we can grab this from terraform output
STATE_MACHINE_ARN = ""
RASTER_ANALYSIS_STATE_MACHINE_ARN = config(
"RASTER_ANALYSIS_STATE_MACHINE_ARN", cast=str, default=None
)
4 changes: 2 additions & 2 deletions scripts/infra
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,5 @@ set -e

GIT_SHA=$(git rev-parse HEAD)

docker-compose -f terraform/docker/docker-compose.yml build
docker-compose -f terraform/docker/docker-compose.yml run --rm terraform "$@" -var="git_sha=${GIT_SHA}"
docker compose -f terraform/docker/docker-compose.yml build
docker compose -f terraform/docker/docker-compose.yml run --rm terraform "$@" -var="git_sha=${GIT_SHA}"
12 changes: 6 additions & 6 deletions scripts/test
Original file line number Diff line number Diff line change
Expand Up @@ -61,16 +61,16 @@ fi

if [ "${BUILD}" = true ]; then
docker build -t batch_gdal-python_test . -f batch/gdal-python.dockerfile
docker build -t batch_postgresql-client_test . -f batch/postgresql-client.dockerfile
docker build -t batch_tile_cache_test . -f batch/tile_cache.dockerfile
docker build -t pixetl_test . -f batch/pixetl.dockerfile
docker-compose -f docker-compose.test.yml --project-name gfw-data-api_test build --no-cache app_test
#docker build -t batch_postgresql-client_test . -f batch/postgresql-client.dockerfile
#docker build -t batch_tile_cache_test . -f batch/tile_cache.dockerfile
#docker build -t pixetl_test . -f batch/pixetl.dockerfile
docker compose -f docker-compose.test.yml --project-name gfw-data-api_test build --no-cache app_test
fi

set +e

# Everything from "--cov-report on" become the arguments to the pytest run inside the docker.
docker-compose -f docker-compose.test.yml --project-name gfw-data-api_test run --rm --name app_test app_test --cov-report xml:/app/tests/cobertura.xml $HANGING $SLOW $DO_COV $DISABLE_WARNINGS $SHOW_STDOUT $args
docker compose -f docker-compose.test.yml --project-name gfw-data-api_test run --rm --name app_test app_test --cov-report xml:/app/tests/cobertura.xml $HANGING $SLOW $DO_COV $DISABLE_WARNINGS $SHOW_STDOUT $args
exit_code=$?
docker-compose -f docker-compose.test.yml --project-name gfw-data-api_test down --remove-orphans
docker compose -f docker-compose.test.yml --project-name gfw-data-api_test down --remove-orphans
exit $exit_code
1 change: 1 addition & 0 deletions terraform/data.tf
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,7 @@ data "template_file" "container_definition" {
pixetl_job_definition = module.batch_job_queues.pixetl_job_definition_arn
pixetl_job_queue = module.batch_job_queues.pixetl_job_queue_arn
raster_analysis_lambda_name = "raster-analysis-tiled_raster_analysis-default"
raster_analysis_sfn_arn = data.terraform_remote_state.raster_analysis_lambda.outputs.raster_analysis_state_machine_arn
service_url = local.service_url
rw_api_url = var.rw_api_url
api_token_secret_arn = data.terraform_remote_state.core.outputs.secrets_read-gfw-api-token_arn
Expand Down
4 changes: 4 additions & 0 deletions terraform/templates/container_definition.json.tmpl
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,10 @@
{
"name": "NAME_SUFFIX",
"value": "${name_suffix}"
},
{
"name": "RASTER_ANALYSIS_STATE_MACHINE_ARN",
"value": "${raster_analysis_sfn_arn}"
}
],
"secrets": [
Expand Down
2 changes: 1 addition & 1 deletion terraform/vars/terraform-dev.tfvars
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ rw_api_url = "https://api.resourcewatch.org"
desired_count = 1
auto_scaling_min_capacity = 1
auto_scaling_max_capacity = 5
lambda_analysis_workspace = "features-lat_lon"
lambda_analysis_workspace = "feature-otf_lists"
key_pair = "dmannarino_gfw"
create_cloudfront_distribution = false
new_relic_license_key_arn = "arn:aws:secretsmanager:us-east-1:563860007740:secret:newrelic/license_key-lolw24"
Expand Down

0 comments on commit 3d0498d

Please sign in to comment.