Merge branch 'master' into develop
solomon-negusse authored Aug 28, 2024
2 parents 9cadb7c + 0fb62e6 commit 17112fe
Showing 4 changed files with 14 additions and 4 deletions.
2 changes: 2 additions & 0 deletions Dockerfile
@@ -1,5 +1,7 @@
FROM tiangolo/uvicorn-gunicorn-fastapi:python3.10-slim

# Comment to trigger an image rebuild

# Optional build argument for different environments
ARG ENV

2 changes: 1 addition & 1 deletion app/models/pydantic/jobs.py
@@ -145,7 +145,7 @@ class GDALCOGJob(Job):
vcpus = 8
memory = 64000
num_processes = 8
- attempts = 2
+ attempts = 5
attempt_duration_seconds = int(DEFAULT_JOB_DURATION * 1.5)


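Context for the change above (not part of the commit): fields like these typically map onto an AWS Batch job definition, so bumping attempts from 2 to 5 means the COG job is retried up to five times before it is marked failed. A minimal sketch of how such settings could be passed to AWS Batch with boto3, assuming a pre-registered job definition; the queue, definition, and duration values below are hypothetical:

    import boto3

    batch = boto3.client("batch")

    # Mirror the model above: 8 vCPUs, 64 GB memory, up to 5 attempts,
    # each attempt bounded by a per-attempt timeout.
    batch.submit_job(
        jobName="cogify-example",                  # hypothetical
        jobQueue="data-lake-job-queue",            # hypothetical
        jobDefinition="gdal-cog-job-definition",   # hypothetical
        containerOverrides={
            "resourceRequirements": [
                {"type": "VCPU", "value": "8"},
                {"type": "MEMORY", "value": "64000"},
            ],
        },
        retryStrategy={"attempts": 5},
        timeout={"attemptDurationSeconds": 7200},  # placeholder for DEFAULT_JOB_DURATION * 1.5
    )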
12 changes: 10 additions & 2 deletions batch/python/export_to_gee.py
@@ -1,5 +1,13 @@
#!/usr/bin/env python

# Required options:
# --dataset {dataset}
# --implementation {implementation}
#
# Assumes that the cog file to be uploaded is available at {implementation}.tif
# Uploads the cog file to GCS at {GCS_BUCKET}/{dataset}/{implementation}.tif and
# creates a GEE asset that links to that GCS URI.

import json
import os

@@ -35,7 +43,7 @@ def upload_cog_to_gcs(dataset, implementation):
bucket = storage_client.bucket(GCS_BUCKET)
blob = bucket.blob(f"{dataset}/{implementation}.tif")

blob.upload_from_filename("cog.tif")
blob.upload_from_filename(f"{implementation}.tif")

return f"gs://{GCS_BUCKET}/{dataset}/{implementation}.tif"

@@ -44,7 +52,7 @@ def create_cog_backed_asset(dataset, implementation, gcs_path, service_account):
credentials = ee.ServiceAccountCredentials(service_account, GCS_CREDENTIALS_FILE)
ee.Initialize(credentials)

- # delete any existing asset with the same dataset/implementatio
+ # delete any existing asset with the same dataset/implementation
try:
ee.data.deleteAsset(f"projects/{EE_PROJECT}/assets/{dataset}/{implementation}")
except ee.EEException:
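Not part of the diff, just an illustrative sketch of the end-to-end flow the header comment describes: upload the local {implementation}.tif to GCS, then register a COG-backed Earth Engine asset that points at the GCS object instead of ingesting it. The storage and deleteAsset calls mirror the diff; the createAsset payload shape ("gcs_location" with a "uris" list) is an assumption based on the earthengine-api docs for Cloud-GeoTIFF-backed assets, and the bucket/project names are placeholders:

    import ee
    from google.cloud import storage

    GCS_BUCKET = "example-gee-cogs"    # placeholder
    EE_PROJECT = "example-ee-project"  # placeholder


    def export_cog(dataset, implementation, service_account, key_file):
        # Upload the COG produced by cogify.sh to gs://{GCS_BUCKET}/{dataset}/{implementation}.tif
        blob = storage.Client().bucket(GCS_BUCKET).blob(f"{dataset}/{implementation}.tif")
        blob.upload_from_filename(f"{implementation}.tif")
        gcs_uri = f"gs://{GCS_BUCKET}/{dataset}/{implementation}.tif"

        ee.Initialize(ee.ServiceAccountCredentials(service_account, key_file))
        asset_id = f"projects/{EE_PROJECT}/assets/{dataset}/{implementation}"
        try:
            ee.data.deleteAsset(asset_id)  # replace any existing asset at this path
        except ee.EEException:
            pass  # nothing to delete
        # Create an asset backed by the COG in GCS rather than a full ingestion.
        ee.data.createAsset(
            {"type": "IMAGE", "gcs_location": {"uris": [gcs_uri]}},  # payload shape assumed
            asset_id,
        )

In the batch pipeline this runs after cogify.sh has produced {implementation}.tif, e.g. python export_to_gee.py --dataset <dataset> --implementation <implementation>.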
2 changes: 1 addition & 1 deletion batch/scripts/cogify.sh
@@ -38,7 +38,7 @@ else
fi

# convert to COG using existing overviews, this adds some additional layout optimizations
gdal_translate "${IMPLEMENTATION}_merged.tif" "${IMPLEMENTATION}.tif" -of COG -co COMPRESS=DEFLATE -co BLOCKSIZE="${BLOCK_SIZE}" -co BIGTIFF=IF_SAFER -co NUM_THREADS=ALL_CPUS -co OVERVIEWS=FORCE_USE_EXISTING --config GDAL_CACHEMAX 70% --config GDAL_NUM_THREADS ALL_CPUS
gdal_translate "${IMPLEMENTATION}_merged.tif" "${IMPLEMENTATION}.tif" -of COG -co COMPRESS=DEFLATE -co BLOCKSIZE="${BLOCK_SIZE}" -co BIGTIFF=IF_SAFER -co NUM_THREADS=ALL_CPUS -co OVERVIEWS=FORCE_USE_EXISTING -co SPARSE_OK=TRUE --config GDAL_CACHEMAX 70% --config GDAL_NUM_THREADS ALL_CPUS

# upload to data lake
aws s3 cp "${IMPLEMENTATION}.tif" "${TARGET}"
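The only change here is the new -co SPARSE_OK=TRUE creation option: with it, the COG driver skips writing blocks that consist entirely of nodata, which can noticeably shrink mostly-empty rasters. A rough equivalent through the GDAL Python bindings, assuming osgeo is installed; the file names and block size are placeholders for the script's variables:

    from osgeo import gdal

    gdal.UseExceptions()
    gdal.SetConfigOption("GDAL_CACHEMAX", "70%")
    gdal.SetConfigOption("GDAL_NUM_THREADS", "ALL_CPUS")

    gdal.Translate(
        "implementation.tif",               # placeholder for ${IMPLEMENTATION}.tif
        "implementation_merged.tif",        # placeholder for ${IMPLEMENTATION}_merged.tif
        format="COG",
        creationOptions=[
            "COMPRESS=DEFLATE",
            "BLOCKSIZE=512",                # placeholder for ${BLOCK_SIZE}
            "BIGTIFF=IF_SAFER",
            "NUM_THREADS=ALL_CPUS",
            "OVERVIEWS=FORCE_USE_EXISTING", # reuse overviews built earlier in the script
            "SPARSE_OK=TRUE",               # new: omit all-nodata blocks from the output
        ],
    )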
