Updates Case Study notebooks to make sure collections work as needed.
dhavide committed Oct 31, 2024
1 parent fff2072 commit 17cc174
Showing 7 changed files with 487 additions and 128 deletions.
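
The substantive change across the notebooks is in the STAC query cell: the duplicate Client.open call is dropped, the DSWx collection ID is apparently updated to OPERA_L3_DSWX-HLS_V1_1.0, and the search and loading cells gain %%time / %time plus hidden-source metadata. Below is a minimal sketch of the corrected search pattern, assuming STAC_URL points at NASA's CMR-STAC endpoint and using placeholder coordinates for the Livingston, TX study site (the real values are collapsed in this diff).

# Minimal sketch of the corrected STAC search (assumptions noted in comments).
from datetime import datetime
from pystac_client import Client
from shapely.geometry import Point

STAC_URL = 'https://cmr.earthdata.nasa.gov/stac'   # assumed endpoint; not shown in this diff
livingston_tx_lonlat = (-95.09, 30.71)              # hypothetical coordinates for the study site

# The flooding event primarily happened during 04/30 - 05/02; search before and after it
start_date = datetime(year=2024, month=4, day=1)
stop_date = datetime(year=2024, month=5, day=31)    # assumed end date
date_range = f'{start_date.strftime("%Y-%m-%d")}/{stop_date.strftime("%Y-%m-%d")}'

# Open the PO DAAC (POCLOUD) catalog once; the earlier duplicate Client.open is removed
catalog = Client.open(f'{STAC_URL}/POCLOUD/')
collections = ["OPERA_L3_DSWX-HLS_V1_1.0"]          # updated collection ID

search_opts = {
    'bbox': Point(*livingston_tx_lonlat).buffer(0.01).bounds,
    'collections': collections,
    'datetime': date_range,
}
results = list(catalog.search(**search_opts).items())
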
92 changes: 73 additions & 19 deletions 04_Case_Studies/1a-Flooding-case-study.ipynb
@@ -18,7 +18,11 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"from warnings import filterwarnings\n",
@@ -55,7 +59,11 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"# Study location\n",
@@ -69,7 +77,11 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"# Visualize location of area of study\n",
@@ -92,9 +104,14 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"%%time\n",
"# The flooding event primarily happened during 04/30 - 05/02\n",
"# We will search for data before and after the event\n",
"start_date = datetime(year=2024, month=4, day=1)\n",
@@ -106,12 +123,8 @@
"\n",
"# Setup PySTAC client\n",
"# POCLOUD refers to the PO DAAC cloud environment that hosts earth observation data\n",
"catalog = Client.open(f'{STAC_URL}/POCLOUD/') \n",
"\n",
"# Setup PySTAC client\n",
"provider_cat = Client.open(STAC_URL)\n",
"catalog = Client.open(f'{STAC_URL}/POCLOUD/')\n",
"collections = [\"OPERA_L3_DSWX-HLS_V1\"]\n",
"collections = [\"OPERA_L3_DSWX-HLS_V1_1.0\"]\n",
"\n",
"search_opts = {\n",
" 'bbox' : Point(*livingston_tx_lonlat).buffer(0.01).bounds, \n",
@@ -127,7 +140,11 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"def search_to_df(results, layer_name):\n",
@@ -150,16 +167,24 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"granules = search_to_df(results=results, layer_name='0_B01_WTR')"
"%time granules = search_to_df(results=results, layer_name='0_B01_WTR')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"# We now filter the dataframe to restrict our results to a single tile_id\n",
@@ -171,7 +196,11 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"def urls_to_dataset(granule_dataframe):\n",
@@ -225,16 +254,37 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"dataset= urls_to_dataset(granules)"
"%time dataset= urls_to_dataset(granules)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"dataset # Examine the attributes of the dataset object"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"# Define a colormap\n",
@@ -247,7 +297,11 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"img = dataset.hvplot.image(title = 'DSWx data for May 2024 Texas floods',\n",
@@ -280,7 +334,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.3"
"version": "3.12.4"
}
},
"nbformat": 4,
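
The cells above call two helpers whose bodies are collapsed in this view: search_to_df(results, layer_name), which gathers granule URLs for a named layer into a DataFrame, and urls_to_dataset(granule_dataframe), which assembles those URLs into an xarray dataset (now timed with %time). The sketch below is one plausible implementation; the column names, tile-id parsing, and use of rioxarray are assumptions, not the notebooks' actual code.

# Hedged sketch of the collapsed helper functions (assumed behavior only).
import pandas as pd
import rioxarray
import xarray as xr

def search_to_df(results, layer_name):
    """Collect asset URLs whose key matches layer_name into a DataFrame (assumed logic)."""
    rows = []
    for item in results:
        for key, asset in item.assets.items():
            if layer_name in key:
                rows.append({'datetime': item.datetime,
                             'tile_id': item.id.split('_')[3],   # hypothetical tile-id parsing
                             'hrefs': asset.href})
    return pd.DataFrame(rows)

def urls_to_dataset(granule_dataframe):
    """Open each GeoTIFF URL and stack the rasters along a time dimension (assumed logic)."""
    arrays = []
    for _, row in granule_dataframe.iterrows():
        da = rioxarray.open_rasterio(row['hrefs']).squeeze('band', drop=True)
        da = da.expand_dims(time=[row['datetime']])
        arrays.append(da)
    return xr.concat(arrays, dim='time')
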
76 changes: 57 additions & 19 deletions 04_Case_Studies/1b-BhakraNangal-Reservoir.ipynb
@@ -17,7 +17,11 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"from warnings import filterwarnings\n",
@@ -57,7 +61,11 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"bhakra_dam = (76.46, 31.42)\n",
@@ -70,7 +78,11 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"bhakra_dam_gv = gv.Points([bhakra_dam])\n",
@@ -92,9 +104,14 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"%%time\n",
"# We will query the DSWx product record to understand variations in water levels in the reservoir\n",
"start_date = datetime(year=2023, month=4, day=1)\n",
"stop_date = datetime(year=2024, month=4, day=1)\n",
@@ -105,12 +122,8 @@
"\n",
"# Setup PySTAC client\n",
"# POCLOUD refers to the PO DAAC cloud environment that hosts earth observation data\n",
"catalog = Client.open(f'{STAC_URL}/POCLOUD/') \n",
"\n",
"# Setup PySTAC client\n",
"provider_cat = Client.open(STAC_URL)\n",
"catalog = Client.open(f'{STAC_URL}/POCLOUD/')\n",
"collections = [\"OPERA_L3_DSWX-HLS_V1\"]\n",
"collections = [\"OPERA_L3_DSWX-HLS_V1_1.0\"]\n",
"\n",
"# Setup search options\n",
"opts = {\n",
@@ -128,7 +141,11 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"def filter_search_by_cc(results, cloud_threshold=10):\n",
@@ -160,9 +177,14 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"%%time\n",
"# let's filter our results so that only scenes with less than 10% cloud cover are returned\n",
"results = filter_search_by_cc(results)\n",
"\n",
@@ -172,7 +194,11 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"# Load results into dataframe\n",
@@ -182,16 +208,24 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"dataset= urls_to_dataset(granules)"
"%time dataset= urls_to_dataset(granules)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"# Define a colormap\n",
@@ -204,7 +238,11 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"img = dataset.hvplot.image(title = 'Bhakra Nangal Dam, India - water extent over a year',\n",
@@ -225,7 +263,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "climaterisk",
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -239,9 +277,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.3"
"version": "3.12.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
"nbformat_minor": 4
}
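
This notebook additionally filters the search results by cloud cover via filter_search_by_cc(results, cloud_threshold=10) before loading granules; only the function's signature appears in the loaded hunks. A hedged sketch follows, assuming each STAC item exposes an eo:cloud_cover property in its metadata.

# Hedged sketch of the cloud-cover filter (property name is an assumption).
def filter_search_by_cc(results, cloud_threshold=10):
    """Keep only items whose reported cloud cover is below cloud_threshold percent (assumed logic)."""
    filtered = []
    for item in results:
        cloud_cover = item.properties.get('eo:cloud_cover')   # assumed STAC property
        if cloud_cover is not None and cloud_cover < cloud_threshold:
            filtered.append(item)
    return filtered
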