diff --git a/app/models/pydantic/query.py b/app/models/pydantic/query.py index 298427464..5504ee226 100644 --- a/app/models/pydantic/query.py +++ b/app/models/pydantic/query.py @@ -13,6 +13,7 @@ class QueryRequestIn(StrictBaseModel): class QueryBatchRequestIn(StrictBaseModel): feature_collection: Optional[FeatureCollection] uri: Optional[str] + id_field: str sql: str diff --git a/app/routes/datasets/queries.py b/app/routes/datasets/queries.py index 5e53b9423..650cc3535 100755 --- a/app/routes/datasets/queries.py +++ b/app/routes/datasets/queries.py @@ -20,7 +20,6 @@ from fastapi.logger import logger # from fastapi.openapi.models import APIKey -from fastapi.openapi.models import APIKey from fastapi.responses import ORJSONResponse, RedirectResponse from pglast import printers # noqa from pglast import Node, parse_sql @@ -29,7 +28,6 @@ from pydantic.tools import parse_obj_as from ...application import db -from ...authentication.api_keys import get_api_key from ...authentication.token import is_gfwpro_admin_for_query # from ...authentication.api_keys import get_api_key @@ -88,7 +86,7 @@ from ...settings.globals import ( GEOSTORE_SIZE_LIMIT_OTF, RASTER_ANALYSIS_LAMBDA_NAME, - STATE_MACHINE_ARN, + RASTER_ANALYSIS_STATE_MACHINE_ARN, ) from ...utils.aws import get_sfn_client, invoke_lambda from ...utils.geostore import get_geostore @@ -339,7 +337,7 @@ async def query_dataset_list_post( *, dataset_version: Tuple[str, str] = Depends(dataset_version_dependency), request: QueryBatchRequestIn, - api_key: APIKey = Depends(get_api_key), + # api_key: APIKey = Depends(get_api_key), ): """Execute a READ-ONLY SQL query on the given dataset version (if implemented).""" @@ -376,6 +374,7 @@ async def query_dataset_list_post( input = { "query": sql, + "id_field": request.id_field, "environment": data_environment.dict()["layers"], } @@ -401,8 +400,8 @@ async def query_dataset_list_post( async def _start_batch_execution(job_id: UUID, input: Dict[str, Any]) -> None: 
get_sfn_client().start_execution( - stateMachineArn=STATE_MACHINE_ARN, - name=job_id, + stateMachineArn=RASTER_ANALYSIS_STATE_MACHINE_ARN, + name=str(job_id), input=json.dumps(input), ) diff --git a/app/routes/jobs/job.py b/app/routes/jobs/job.py index 04611bbb2..23bc00447 100644 --- a/app/routes/jobs/job.py +++ b/app/routes/jobs/job.py @@ -13,7 +13,7 @@ from fastapi.responses import ORJSONResponse from ...models.pydantic.user_job import UserJob, UserJobResponse -from ...settings.globals import STATE_MACHINE_ARN +from ...settings.globals import RASTER_ANALYSIS_STATE_MACHINE_ARN from ...utils.aws import get_sfn_client router = APIRouter() @@ -63,9 +63,7 @@ async def _get_user_job(job_id: UUID) -> UserJob: async def _get_sfn_execution(job_id: UUID) -> Dict[str, Any]: - execution_arn = ( - f"{STATE_MACHINE_ARN.replace('stateMachines', 'execution')}:{job_id}" - ) + execution_arn = f"{RASTER_ANALYSIS_STATE_MACHINE_ARN.replace('stateMachines', 'execution')}:{str(job_id)}" execution = get_sfn_client().describe_execution(execution_arn) return execution diff --git a/app/settings/globals.py b/app/settings/globals.py index f4d448733..3fb816af2 100644 --- a/app/settings/globals.py +++ b/app/settings/globals.py @@ -191,5 +191,6 @@ # commercial datasets which shouldn't be downloaded in any way.) 
PROTECTED_QUERY_DATASETS = ["wdpa_licensed_protected_areas"] -# TODO mocking util we can grab this from terraform output -STATE_MACHINE_ARN = "" +RASTER_ANALYSIS_STATE_MACHINE_ARN = config( + "RASTER_ANALYSIS_STATE_MACHINE_ARN", cast=str, default=None +) diff --git a/scripts/infra b/scripts/infra index f973a724b..f93fb6002 100755 --- a/scripts/infra +++ b/scripts/infra @@ -4,5 +4,5 @@ set -e GIT_SHA=$(git rev-parse HEAD) -docker-compose -f terraform/docker/docker-compose.yml build -docker-compose -f terraform/docker/docker-compose.yml run --rm terraform "$@" -var="git_sha=${GIT_SHA}" +docker compose -f terraform/docker/docker-compose.yml build +docker compose -f terraform/docker/docker-compose.yml run --rm terraform "$@" -var="git_sha=${GIT_SHA}" diff --git a/scripts/test b/scripts/test index 7fcc16587..e6996d475 100755 --- a/scripts/test +++ b/scripts/test @@ -61,16 +61,16 @@ fi if [ "${BUILD}" = true ]; then docker build -t batch_gdal-python_test . -f batch/gdal-python.dockerfile - docker build -t batch_postgresql-client_test . -f batch/postgresql-client.dockerfile - docker build -t batch_tile_cache_test . -f batch/tile_cache.dockerfile - docker build -t pixetl_test . -f batch/pixetl.dockerfile - docker-compose -f docker-compose.test.yml --project-name gfw-data-api_test build --no-cache app_test + #docker build -t batch_postgresql-client_test . -f batch/postgresql-client.dockerfile + #docker build -t batch_tile_cache_test . -f batch/tile_cache.dockerfile + #docker build -t pixetl_test . -f batch/pixetl.dockerfile + docker compose -f docker-compose.test.yml --project-name gfw-data-api_test build --no-cache app_test fi set +e # Everything from "--cov-report on" become the arguments to the pytest run inside the docker. 
-docker-compose -f docker-compose.test.yml --project-name gfw-data-api_test run --rm --name app_test app_test --cov-report xml:/app/tests/cobertura.xml $HANGING $SLOW $DO_COV $DISABLE_WARNINGS $SHOW_STDOUT $args +docker compose -f docker-compose.test.yml --project-name gfw-data-api_test run --rm --name app_test app_test --cov-report xml:/app/tests/cobertura.xml $HANGING $SLOW $DO_COV $DISABLE_WARNINGS $SHOW_STDOUT $args exit_code=$? -docker-compose -f docker-compose.test.yml --project-name gfw-data-api_test down --remove-orphans +docker compose -f docker-compose.test.yml --project-name gfw-data-api_test down --remove-orphans exit $exit_code diff --git a/terraform/data.tf b/terraform/data.tf index 56b445327..34f3319fc 100644 --- a/terraform/data.tf +++ b/terraform/data.tf @@ -69,6 +69,7 @@ data "template_file" "container_definition" { pixetl_job_definition = module.batch_job_queues.pixetl_job_definition_arn pixetl_job_queue = module.batch_job_queues.pixetl_job_queue_arn raster_analysis_lambda_name = "raster-analysis-tiled_raster_analysis-default" + raster_analysis_sfn_arn = data.terraform_remote_state.raster_analysis_lambda.outputs.raster_analysis_state_machine_arn service_url = local.service_url rw_api_url = var.rw_api_url api_token_secret_arn = data.terraform_remote_state.core.outputs.secrets_read-gfw-api-token_arn diff --git a/terraform/templates/container_definition.json.tmpl b/terraform/templates/container_definition.json.tmpl index 1b5713fd7..f031b29ca 100644 --- a/terraform/templates/container_definition.json.tmpl +++ b/terraform/templates/container_definition.json.tmpl @@ -112,6 +112,10 @@ { "name": "NAME_SUFFIX", "value": "${name_suffix}" + }, + { + "name": "RASTER_ANALYSIS_STATE_MACHINE_ARN", + "value": "${raster_analysis_sfn_arn}" } ], "secrets": [ diff --git a/terraform/vars/terraform-dev.tfvars b/terraform/vars/terraform-dev.tfvars index 92024ec8d..5f9477624 100644 --- a/terraform/vars/terraform-dev.tfvars +++ b/terraform/vars/terraform-dev.tfvars @@ -5,7 +5,7
@@ rw_api_url = "https://api.resourcewatch.org" desired_count = 1 auto_scaling_min_capacity = 1 auto_scaling_max_capacity = 5 -lambda_analysis_workspace = "features-lat_lon" +lambda_analysis_workspace = "feature-otf_lists" key_pair = "dmannarino_gfw" create_cloudfront_distribution = false new_relic_license_key_arn = "arn:aws:secretsmanager:us-east-1:563860007740:secret:newrelic/license_key-lolw24"