diff --git a/app/models/pydantic/user_job.py b/app/models/pydantic/user_job.py
index 37ffbb1b6..698557325 100644
--- a/app/models/pydantic/user_job.py
+++ b/app/models/pydantic/user_job.py
@@ -8,7 +8,7 @@ class UserJob(BaseModel):
     job_id: UUID
-    job_link: str = ""
+    job_link: Optional[str]
     status: str = "pending"
     download_link: Optional[str] = None
     failed_geometries_link: Optional[str] = None
diff --git a/app/routes/datasets/queries.py b/app/routes/datasets/queries.py
index deddfff5f..51ebdfe81 100755
--- a/app/routes/datasets/queries.py
+++ b/app/routes/datasets/queries.py
@@ -24,6 +24,7 @@
 from pglast.parser import ParseError
 from pglast.printer import RawStream
 from pydantic.tools import parse_obj_as
+from app.settings.globals import API_URL
 
 from ...application import db
 from ...authentication.api_keys import get_api_key
@@ -419,7 +420,8 @@ async def query_dataset_list_post(
         logger.error(error)
         return HTTPException(500, "There was an error starting your job.")
 
-    return UserJobResponse(data=UserJob(job_id=job_id))
+    job_link = f"{API_URL}/job/{job_id}"
+    return UserJobResponse(data=UserJob(job_id=job_id, job_link=job_link))
 
 
 async def _start_batch_execution(job_id: UUID, input: Dict[str, Any]) -> None:
diff --git a/app/routes/jobs/job.py b/app/routes/jobs/job.py
index a64bcb30f..66dc9003e 100644
--- a/app/routes/jobs/job.py
+++ b/app/routes/jobs/job.py
@@ -17,7 +17,6 @@
 from ...settings.globals import RASTER_ANALYSIS_STATE_MACHINE_ARN
 from ...utils.aws import get_sfn_client
 from ..datasets import _get_presigned_url_from_path
-from app.settings.globals import API_URL
 
 router = APIRouter()
 
@@ -42,7 +41,6 @@ async def get_job(*, job_id: UUID = Path(...)) -> UserJobResponse:
 
 async def _get_user_job(job_id: UUID) -> UserJob:
     execution = await _get_sfn_execution(job_id)
-    job_link = f"{API_URL}/job/{job_id}"
 
     if execution["status"] == "SUCCEEDED":
         output = (
@@ -70,7 +68,6 @@ async def _get_user_job(job_id: UUID) -> UserJob:
             logger.error(f"Analysis service returned an unexpected response: {output}")
             return UserJob(
                 job_id=job_id,
-                job_link=job_link,
                 status="failed",
                 download_link=None,
                 failed_geometries_link=None,
@@ -79,7 +76,6 @@ async def _get_user_job(job_id: UUID) -> UserJob:
 
         return UserJob(
             job_id=job_id,
-            job_link=job_link,
             status=output["status"],
             download_link=download_link,
             failed_geometries_link=failed_geometries_link,
@@ -89,7 +85,6 @@ async def _get_user_job(job_id: UUID) -> UserJob:
     elif execution["status"] == "RUNNING":
         return UserJob(
             job_id=job_id,
-            job_link=job_link,
             status="pending",
             download_link=None,
             failed_geometries_link=None,
@@ -98,7 +93,6 @@ async def _get_user_job(job_id: UUID) -> UserJob:
     else:
         return UserJob(
             job_id=job_id,
-            job_link=job_link,
             status="failed",
             download_link=None,
             failed_geometries_link=None,
diff --git a/tests_v2/unit/app/routes/datasets/test_query.py b/tests_v2/unit/app/routes/datasets/test_query.py
index 52fec3683..254bc9b90 100755
--- a/tests_v2/unit/app/routes/datasets/test_query.py
+++ b/tests_v2/unit/app/routes/datasets/test_query.py
@@ -703,6 +703,7 @@ async def test_query_batch_feature_collection(
             assert False
 
     assert str(uuid) == data["job_id"]
+    assert data["job_link"].endswith(f"/job/{data['job_id']}")
     assert data["status"] == "pending"
diff --git a/tests_v2/unit/app/routes/jobs/test_job.py b/tests_v2/unit/app/routes/jobs/test_job.py
index c4ffe7fe6..4adab7633 100644
--- a/tests_v2/unit/app/routes/jobs/test_job.py
+++ b/tests_v2/unit/app/routes/jobs/test_job.py
@@ -145,7 +145,6 @@ async def test_job_success(
     data = resp.json()["data"]
     assert data["job_id"] == TEST_JOB_ID
-    assert data["job_link"].endswith(f"/job/{TEST_JOB_ID}")
     assert data["status"] == "success"
     assert "test/results.csv" in data["download_link"]
     assert data["failed_geometries_link"] is None