Merge branch 'main' into ao_update_precip_var
acordonez authored Apr 23, 2024
2 parents 029e843 + d05a7fe commit 9d35ceb
Showing 25 changed files with 1,353 additions and 556 deletions.
6 changes: 6 additions & 0 deletions README.md
@@ -104,6 +104,9 @@ Release Notes and History

| <div style="width:300%">[Versions]</div> | Update summary |
| ------------- | ------------------------------------- |
| [v3.3.4] | Technical update
| [v3.3.3] | Technical update
| [v3.3.2] | Technical update
| [v3.3.1] | Technical update
| [v3.3] | New metric added: **Sea-Ice**
| [v3.2] | New metric added: **Extremes**
@@ -133,6 +136,9 @@ Release Notes and History


[Versions]: https://github.com/PCMDI/pcmdi_metrics/releases
[v3.3.4]: https://github.com/PCMDI/pcmdi_metrics/releases/tag/v3.3.4
[v3.3.3]: https://github.com/PCMDI/pcmdi_metrics/releases/tag/v3.3.3
[v3.3.2]: https://github.com/PCMDI/pcmdi_metrics/releases/tag/v3.3.2
[v3.3.1]: https://github.com/PCMDI/pcmdi_metrics/releases/tag/v3.3.1
[v3.3]: https://github.com/PCMDI/pcmdi_metrics/releases/tag/v3.3
[v3.2]: https://github.com/PCMDI/pcmdi_metrics/releases/tag/v3.2
4 changes: 4 additions & 0 deletions pcmdi_metrics/mean_climate/lib/compute_metrics.py
@@ -47,6 +47,10 @@ def compute_metrics(Var, dm, do, debug=False, time_dim_sync=False):

    metrics_dictionary = OrderedDict()

    # QC for bounds
    dm = dm.bounds.add_missing_bounds()
    do = do.bounds.add_missing_bounds()

    if var in ["hus"]:
        sig_digits = ".5f"
    else:
21 changes: 17 additions & 4 deletions pcmdi_metrics/mean_climate/lib/load_and_regrid.py
@@ -1,7 +1,7 @@
import numpy as np
import xcdat as xc

from pcmdi_metrics.io import xcdat_open
from pcmdi_metrics.io import get_latitude, get_longitude, xcdat_open


def load_and_regrid(
@@ -40,9 +40,14 @@ def load_and_regrid(
    # SET CONDITIONAL ON INPUT VARIABLE
    if varname == "pr":
        print("Adjust units for pr")
        if ds[varname_in_file].units == "kg m-2 s-1":
            ds[varname_in_file] = ds[varname_in_file] * 86400
            print("pr units adjusted to [mm d-1] from [kg m-2 s-1] by 86400 multiplied")
        if "units" in ds[varname_in_file].attrs:
            if ds[varname_in_file].units == "kg m-2 s-1":
                ds[varname_in_file] = ds[varname_in_file] * 86400
                print(
                    "pr units adjusted to [mm d-1] from [kg m-2 s-1] by 86400 multiplied"
                )
        else:
            ds[varname_in_file] = ds[varname_in_file] * 86400  # Assumed as kg m-2 s-1

"""
# calendar quality check
@@ -118,6 +123,14 @@ def load_and_regrid(
print("ERROR: load and regrid can not complete")
return

    # axis
    lat = get_latitude(ds)
    lon = get_longitude(ds)
    if "axis" not in lat.attrs:
        ds[lat.name].attrs["axis"] = "Y"
    if "axis" not in lon.attrs:
        ds[lon.name].attrs["axis"] = "X"

    # regrid
    if regrid_tool == "regrid2":
        ds_regridded = ds.regridder.horizontal(
3 changes: 2 additions & 1 deletion pcmdi_metrics/mean_climate/mean_climate_driver.py
@@ -398,6 +398,7 @@
ds_test_dict[region],
ds_ref_dict[region],
debug=debug,
time_dim_sync=True,
)
except Exception:
result_dict["RESULTS"][model][ref][run][
@@ -407,7 +408,7 @@
ds_test_dict[region],
ds_ref_dict[region],
debug=debug,
time_dim_sync=True,
time_dim_sync=False,
)

# write individual JSON
11 changes: 10 additions & 1 deletion pcmdi_metrics/precip_variability/README.md
@@ -2,7 +2,7 @@

Reference: Ahn, M.-S., P. J. Gleckler, J. Lee, A. G. Pendergrass, and C. Jakob, 2022: Benchmarking Simulated Precipitation Variability Amplitude across Timescales. Journal of Climate. https://doi.org/10.1175/JCLI-D-21-0542.1

Demo: [precipitation variability across timescales](https://github.com/PCMDI/pcmdi_metrics/blob/93c30ce485719ecf6a531a4ee47886160ebb73e4/doc/jupyter/Demo/Demo_7_precip_variability.ipynb)
Demo: [precipitation variability across timescales](https://github.com/PCMDI/pcmdi_metrics/blob/main/doc/jupyter/Demo/Demo_7_precip_variability.ipynb)

## Driver code:
- `variability_across_timescales_PS_driver.py`
@@ -19,3 +19,12 @@ Demo: [precipitation variability across timescales](https://github.com/PCMDI/pcm
- `scripts_pcmdi/`
  - `run_obs.bash`
  - `run_parallel.wait.bash`

## Figure scripts:

These scripts can be modified by the user to postprocess the output of `variability_across_timescales_PS_driver.py`, a step needed to create Figure 6. The `*_regional.py` scripts are for the custom-region output case, not the default region set; a minimal sketch of loading their output follows the list below.

- `scripts_pcmdi/`
  - `calc_ps_area_freq_mean_regional.py`
  - `calc_ps_area_mean_regional.py`
  - `calc_ps_freq_mean_regional.py`
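
For orientation, here is a minimal sketch (not part of the repository) of how the JSON written by these figure scripts could be loaded for plotting. The path, version string, resolution, MIP, domain, and frequency band below are illustrative placeholders; the file-name pattern and nesting order follow the scripts added in this commit.

```python
import json

# Hypothetical path: the scripts append a vYYYYMMDD version subdirectory to the
# user-supplied outdir; "day" and "180x90" are placeholder hr/resolution values.
fname = "figure_data/v20240423/PS_pr.day_regrid.180x90_area.freq.mean.forced.json"

with open(fname) as f:
    psdmfm = json.load(f)

# The output is nested as [forced|unforced][mip][dataset][variable][domain][frequency band].
for dat, entry in psdmfm["forced"]["cmip6"].items():
    print(dat, entry["power"]["CONUS"]["annual"])
```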
@@ -0,0 +1,134 @@
import copy
import datetime
import json
import os
import pickle

import numpy as np


def prdday_to_frqidx(prdday, frequency, ntd):
"""
Find frequency index from input period
Input
- prdday: period (day)
- frequency: frequency
- ntd: number of time steps per day (daily: 1, 3-hourly: 8)
Output
- idx: frequency index
"""
frq = 1.0 / (float(prdday) * ntd)
idx = (np.abs(frequency - frq)).argmin()
return int(idx)
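
# Example (illustrative, not part of the original script): with daily data (ntd=1),
# prdday_to_frqidx(20, frequency, 1) returns the index of the spectral bin closest
# to 1/20 cycles per day, i.e. a 20-day period.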


# --------------------
# User settings here
# --------------------
# "3hr" or "day"
hr = ""
# Input file name (pickle .pkl output from calc_ps_area.mean.py)
fname = ""
# Output directory (not including version)
outdir = ""

# -----------------------
# The rest of the script
# -----------------------
ver = datetime.datetime.now().strftime("v%Y%m%d")

if hr == "day":
frqs_forced = ["semi-annual", "annual"]
frqs_unforced = ["synoptic", "sub-seasonal", "seasonal-annual", "interannual"]
ntd = 1
elif hr == "3hr":
frqs_forced = ["semi-diurnal", "diurnal", "semi-annual", "annual"]
frqs_unforced = [
"sub-daily",
"synoptic",
"sub-seasonal",
"seasonal-annual",
"interannual",
]
ntd = 8

infile = open(fname, "rb")
psdm = pickle.load(infile)

psdmfm = copy.deepcopy(psdm)
for frc in psdm.keys():
if frc == "forced":
frqs = frqs_forced
elif frc == "unforced":
frqs = frqs_unforced
print(frc)
for mip in psdm[frc].keys():
print(mip)
for dat in psdm[frc][mip].keys():
frequency = np.array(psdm[frc][mip][dat]["freqs"])
del psdm[frc][mip][dat]["freqs"]
del psdmfm[frc][mip][dat]["freqs"]

for var in psdm[frc][mip][dat].keys():
print(dat, var)
for idm, dom in enumerate(psdm[frc][mip][dat][var].keys()):
am = np.array(psdm[frc][mip][dat][var][dom])
del psdmfm[frc][mip][dat][var][dom]
psdmfm[frc][mip][dat][var][dom] = {}
print(dom)
for frq in frqs:
print(frq)
if frq == "semi-diurnal": # pr=0.5day
idx = prdday_to_frqidx(0.5, frequency, ntd)
amfm = am[idx]
elif frq == "diurnal": # pr=1day
idx = prdday_to_frqidx(1, frequency, ntd)
amfm = am[idx]
if frq == "semi-annual": # 180day=<pr=<183day
idx2 = prdday_to_frqidx(180, frequency, ntd)
idx1 = prdday_to_frqidx(183, frequency, ntd)
amfm = np.amax(am[idx1 : idx2 + 1])
elif frq == "annual": # 360day=<pr=<366day
idx2 = prdday_to_frqidx(360, frequency, ntd)
idx1 = prdday_to_frqidx(366, frequency, ntd)
amfm = np.amax(am[idx1 : idx2 + 1])
elif frq == "sub-daily": # pr<1day
idx1 = prdday_to_frqidx(1, frequency, ntd)
amfm = np.nanmean(am[idx1 + 1 :])
elif frq == "synoptic": # 1day=<pr<20day
idx2 = prdday_to_frqidx(1, frequency, ntd)
idx1 = prdday_to_frqidx(20, frequency, ntd)
amfm = np.nanmean(am[idx1 + 1 : idx2 + 1])
elif frq == "sub-seasonal": # 20day=<pr<90day
idx2 = prdday_to_frqidx(20, frequency, ntd)
idx1 = prdday_to_frqidx(90, frequency, ntd)
amfm = np.nanmean(am[idx1 + 1 : idx2 + 1])
elif frq == "seasonal-annual": # 90day=<pr<365day
idx2 = prdday_to_frqidx(90, frequency, ntd)
idx1 = prdday_to_frqidx(365, frequency, ntd)
amfm = np.nanmean(am[idx1 + 1 : idx2 + 1])
elif frq == "interannual": # 365day=<pr
idx2 = prdday_to_frqidx(365, frequency, ntd)
amfm = np.nanmean(am[: idx2 + 1])

psdmfm[frc][mip][dat][var][dom][frq] = amfm.tolist()

res = os.path.basename(fname).split("_")[2].split(".")[1]

outdir = os.path.join(outdir, ver) # add version to outdir
if not (os.path.isdir(outdir)):
    os.makedirs(outdir)

# Save as pickle for figure creation
outfile = open(
outdir + "/PS_pr.{0}_regrid.{1}_area.freq.mean.forced.pkl".format(hr, res), "wb"
)
pickle.dump(psdmfm, outfile)
outfile.close()

# Write to JSON file
outfile = open(
outdir + "/PS_pr.{0}_regrid.{1}_area.freq.mean.forced.json".format(hr, res), "w"
)
json.dump(psdmfm, outfile, sort_keys=True, indent=4, separators=(",", ": "))
outfile.close()
@@ -0,0 +1,103 @@
import datetime
import glob
import os
import pickle

import xcdat as xc

# --------------------
# User settings here
# --------------------
# List of domain names
# e.g. ['CONUS']
domains = []
# Which ensembles are included
# e.g. ['cmip6','obs']
mips = []
# Input directories. Replace model, realization,
# and other variables with wildcard as needed
# Replace domain name references with '%(domain)'
model_root = ""
obs_root = ""
# Output directory (not including version)
outdir = ""

# -----------------------
# The rest of the script
# -----------------------
ver = datetime.datetime.now().strftime("v%Y%m%d")

frcs = ["forced", "unforced"]

vars = ["power", "rednoise", "sig95"]

psdm = {}

for ifc, frc in enumerate(frcs):
    psdm[frc] = {}
    for dom in domains:
        for im, mip in enumerate(mips):
            if mip not in psdm[frc]:
                psdm[frc][mip] = {}
            dir = "/"
            if mip == "cmip6":
                root = model_root.replace("%(domain)", dom)
            elif mip == "obs":
                root = obs_root.replace("%(domain)", dom)
            print(root)
            if frc == "forced":
                file_list = sorted(
                    set(glob.glob(os.path.join(root, "PS*")))
                    - set(glob.glob(os.path.join(root, "PS*_unforced.nc")))
                    - set(glob.glob(os.path.join(root, "PS*.json")))
                )
            elif frc == "unforced":
                file_list = sorted(
                    set(glob.glob(os.path.join(root, "PS*_unforced.nc")))
                )

            data = []
            for file in file_list:
                print("File (first loop): ", file)
                if mip == "obs":
                    tmp = file.split("/")[-1].split("_")[3]
                    model = tmp.split(".")[0]
                    data.append(model)
                else:
                    tmp = file.split("/")[-1].split("_")[3]
                    model = tmp.split(".")[0]
                    ens = tmp.split(".")[1]
                    data.append(model + "." + ens)
            print(mip, "# of data:", len(data))
            print("DATA: ", data)

            for id, dat in enumerate(data):
                # freqs = f[id]['freqs']
                print("File (second loop): ", file_list[id])
                frds = xc.open_dataset(file_list[id])
                if dat not in psdm[frc][mip]:
                    psdm[frc][mip][dat] = {}
                if "freqs" not in psdm[frc][mip][dat]:
                    psdm[frc][mip][dat]["freqs"] = list(frds["freqs"])
                for iv, var in enumerate(vars):
                    print(frc, mip, dat, var)
                    if var not in psdm[frc][mip][dat]:
                        psdm[frc][mip][dat][var] = {}
                    am = frds.spatial.average(var, axis=["X", "Y"], weights="generate")[
                        var
                    ]
                    psdm[frc][mip][dat][var][dom] = list(am.data)
                frds.close()

res = os.path.basename(file_list[0]).split("_")[2].split(".")[1]
hr = os.path.basename(file_list[0]).split("_")[1].split(".")[1]

outdir = os.path.join(outdir, ver) # add version to outdir
if not (os.path.isdir(outdir)):
    os.makedirs(outdir)

outfile = open(
    os.path.join(outdir, "PS_pr.{0}_regrid.{1}_area.mean.pkl".format(hr, res)), "wb"
)
pickle.dump(psdm, outfile)
outfile.close()