Use implementation name in S3 intermediates
jterry64 committed Jul 30, 2024
1 parent ce2b2cd commit 89c4441
Showing 1 changed file with 11 additions and 11 deletions: batch/scripts/cogify.sh
@@ -16,32 +16,32 @@ ME=$(basename "$0")
 set -x
 # download all GeoTiff files
 
-if [[ $(aws s3 ls "${PREFIX}/merged.tif") ]]; then
-  aws s3 cp "${PREFIX}/merged.tif" merged.tif
+if [[ $(aws s3 ls "${PREFIX}/${IMPLEMENTATION}_merged.tif") ]]; then
+  aws s3 cp "${PREFIX}/${IMPLEMENTATION}_merged.tif" "${IMPLEMENTATION}_merged.tif"
 else
   aws s3 cp --recursive --exclude "*" --include "*.tif" "${SRC}" .
 
   # create VRT of input files so we can use gdal_translate
-  gdalbuildvrt merged.vrt *.tif
+  gdalbuildvrt "${IMPLEMENTATION}_merged.vrt" *.tif
 
   # merge all rasters into one huge raster using COG block size
-  gdal_translate -of GTiff -co TILED=YES -co BLOCKXSIZE="${BLOCK_SIZE}" -co BLOCKYSIZE="${BLOCK_SIZE}" -co COMPRESS=DEFLATE -co BIGTIFF=IF_SAFER -co NUM_THREADS=ALL_CPUS --config GDAL_CACHEMAX 70% --config GDAL_NUM_THREADS ALL_CPUS merged.vrt merged.tif
-  aws s3 cp merged.tif "${PREFIX}/merged.tif"
+  gdal_translate -of GTiff -co TILED=YES -co BLOCKXSIZE="${BLOCK_SIZE}" -co BLOCKYSIZE="${BLOCK_SIZE}" -co COMPRESS=DEFLATE -co BIGTIFF=IF_SAFER -co NUM_THREADS=ALL_CPUS --config GDAL_CACHEMAX 70% --config GDAL_NUM_THREADS ALL_CPUS "${IMPLEMENTATION}_merged.vrt" "${IMPLEMENTATION}_merged.tif"
+  aws s3 cp merged.tif "${PREFIX}/${IMPLEMENTATION}_merged.tif"
 fi
 
-if [[ $(aws s3 ls "${PREFIX}/merged.tif.ovr") ]]; then
-  aws s3 cp "${PREFIX}/merged.tif.ovr" merged.tif.ovr
+if [[ $(aws s3 ls "${PREFIX}/${IMPLEMENTATION}_merged.tif.ovr") ]]; then
+  aws s3 cp "${PREFIX}/${IMPLEMENTATION}_merged.tif.ovr" "${IMPLEMENTATION}_merged.tif.ovr"
 else
   # generate overviews externally
-  gdaladdo merged.tif -r "${RESAMPLE}" -ro --config GDAL_NUM_THREADS ALL_CPUS --config GDAL_CACHEMAX 70% --config COMPRESS_OVERVIEW DEFLATE
-  aws s3 cp merged.tif.ovr "${PREFIX}/merged.tif.ovr"
+  gdaladdo "${IMPLEMENTATION}_merged.tif" -r "${RESAMPLE}" -ro --config GDAL_NUM_THREADS ALL_CPUS --config GDAL_CACHEMAX 70% --config COMPRESS_OVERVIEW DEFLATE
+  aws s3 cp merged.tif.ovr "${PREFIX}/${IMPLEMENTATION}_merged.tif.ovr"
 fi
 
 # convert to COG using existing overviews, this adds some additional layout optimizations
-gdal_translate merged.tif cog.tif -of COG -co COMPRESS=DEFLATE -co BLOCKSIZE="${BLOCK_SIZE}" -co BIGTIFF=IF_SAFER -co NUM_THREADS=ALL_CPUS -co OVERVIEWS=FORCE_USE_EXISTING --config GDAL_CACHEMAX 70% --config GDAL_NUM_THREADS ALL_CPUS
+gdal_translate "${IMPLEMENTATION}_merged.tif" "${IMPLEMENTATION}.tif" -of COG -co COMPRESS=DEFLATE -co BLOCKSIZE="${BLOCK_SIZE}" -co BIGTIFF=IF_SAFER -co NUM_THREADS=ALL_CPUS -co OVERVIEWS=FORCE_USE_EXISTING --config GDAL_CACHEMAX 70% --config GDAL_NUM_THREADS ALL_CPUS
 
 # upload to data lake
-aws s3 cp cog.tif "${TARGET}"
+aws s3 cp "${IMPLEMENTATION}.tif" "${TARGET}"
 
 # delete intermediate file
 aws s3 rm "${PREFIX}/merged.tif"
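
For context, a minimal sketch of the S3 key layout this change produces. The PREFIX and IMPLEMENTATION values below are hypothetical, invented only to illustrate the naming scheme; the real values come from the batch job environment, and only the file-name pattern is taken from the diff above.

# Hypothetical values, for illustration only
PREFIX="s3://example-bucket/example-dataset/v1/raster/epsg-4326/cog"
IMPLEMENTATION="default"

# Before this commit, every run wrote the same intermediate keys:
#   ${PREFIX}/merged.tif
#   ${PREFIX}/merged.tif.ovr
# After this commit, the intermediates carry the implementation name:
#   ${PREFIX}/${IMPLEMENTATION}_merged.tif      -> .../cog/default_merged.tif
#   ${PREFIX}/${IMPLEMENTATION}_merged.tif.ovr  -> .../cog/default_merged.tif.ovr
# The final COG is written locally as ${IMPLEMENTATION}.tif and uploaded to ${TARGET},
# presumably so different implementations of the same asset no longer overwrite
# each other's intermediate files.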