From 8d299641ddbc37db946d2819c662391ae537cd89 Mon Sep 17 00:00:00 2001 From: gleckler1 Date: Thu, 14 Jul 2022 15:03:34 -0700 Subject: [PATCH 001/130] new file to prototype with xcdat --- .../pcmdi_compute_climatologies-xcdat.py | 214 ++++++++++++++++++ 1 file changed, 214 insertions(+) create mode 100644 pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py new file mode 100644 index 000000000..f17bb030d --- /dev/null +++ b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py @@ -0,0 +1,214 @@ +#!/usr/bin/env python +import datetime + +import cdms2 +from genutil import StringConstructor + +import pcmdi_metrics + +ver = datetime.datetime.now().strftime("v%Y%m%d") + +cdms2.setNetcdfShuffleFlag(0) +cdms2.setNetcdfDeflateFlag(0) +cdms2.setNetcdfDeflateLevelFlag(0) + +# + + +def clim_calc(var, infile, outfile, outdir, outfilename, start, end): + import datetime + import os + + import cdms2 + import cdtime + import cdutil + + ver = datetime.datetime.now().strftime("v%Y%m%d") + + lf = infile + tmp = lf.split("/") + infilename = tmp[len(tmp) - 1] + print("infilename is ", infilename) + + f = cdms2.open(lf) + atts = f.listglobal() + outfd = outfile + + # CONTROL OF OUTPUT DIRECTORY AND FILE + + # outdir AND outfilename PROVIDED BY USER + if outdir is not None and outfilename is not None: + outfd = outdir + outfilename + + # outdir PROVIDED BY USER, BUT filename IS TAKEN FROM infilename WITH CLIM MODIFICATIONS SUFFIX ADDED BELOW + if outdir is not None and outfilename is None: + outfd = outdir + "/" + infilename + + if outdir is None and outfilename is None: + outfd = outfile + + print("outfd is ", outfd) + print("outdir is ", outdir) + + seperate_clims = "y" + + # DEFAULT CLIM - BASED ON ENTIRE TIME SERIES + if (start is None) and (end is None): + d = f(var) + t = d.getTime() + c = t.asComponentTime() + start_yr_str = str(c[0].year) + start_mo_str = str(c[0].month) + end_yr_str = str(c[len(c) - 1].year) + end_mo_str = str(c[len(c) - 1].month) + start_yr = int(start_yr_str) + start_mo = int(start_mo_str) + end_yr = int(end_yr_str) + end_mo = int(end_mo_str) + + # USER DEFINED PERIOD + else: + start_mo = int(start.split("-")[1]) + start_yr = int(start.split("-")[0]) + end_mo = int(end.split("-")[1]) + end_yr = int(end.split("-")[0]) + start_yr_str = str(start_yr) + start_mo_str = str(start_mo) + end_yr_str = str(end_yr) + end_mo_str = str(end_mo) + + d = f( + var, time=(cdtime.comptime(start_yr, start_mo), cdtime.comptime(end_yr, end_mo)) + ) + + print("start_yr_str is ", start_yr_str) + + if start_mo_str not in ["11", "12"]: + start_mo_str = "0" + start_mo_str + if end_mo_str not in ["11", "12"]: + end_mo_str = "0" + end_mo_str + + d_ac = cdutil.ANNUALCYCLE.climatology(d).astype("float32") + d_djf = cdutil.DJF.climatology(d)(squeeze=1).astype("float32") + d_jja = cdutil.JJA.climatology(d)(squeeze=1).astype("float32") + d_son = cdutil.SON.climatology(d)(squeeze=1).astype("float32") + d_mam = cdutil.MAM.climatology(d)(squeeze=1).astype("float32") + + for v in [d_ac, d_djf, d_jja, d_son, d_mam]: + + v.id = var + + for s in ["AC", "DJF", "MAM", "JJA", "SON"]: + + addf = ( + "." + + start_yr_str + + start_mo_str + + "-" + + end_yr_str + + end_mo_str + + "." + + s + + "." 
+ + ver + + ".nc" + ) + + if seperate_clims == "y": + print("outfd is ", outfd) + out = outfd + out = out.replace(".nc", addf) + out = out.replace(".xml", addf) + print("out is ", out) + + if seperate_clims == "n": + out = outfd.replace("climo.nc", s + ".nc") + if s == "AC": + do = d_ac + if s == "DJF": + do = d_djf + if s == "MAM": + do = d_mam + if s == "JJA": + do = d_jja + if s == "SON": + do = d_son + do.id = var + + # MKDIRS AS NEEDED + lst = outfd.split("/") + s = "/" + for ll in range(len(lst)): + d = s.join(lst[0 : ll + 1]) + try: + os.mkdir(d) + except OSError: + pass + + g = cdms2.open(out, "w+") + g.write(do) + + for att in atts: + setattr(g, att, f.getglobal(att)) + g.close() + print(do.shape, " ", d_ac.shape, " ", out) + f.close() + return + + +####################################################################### + + +P = pcmdi_metrics.driver.pmp_parser.PMPMetricsParser() + + +P.add_argument( + "--vars", dest="vars", help="List of variables", nargs="+", required=False +) +P.add_argument("--infile", dest="infile", help="Defines infile", required=False) +P.add_argument( + "--outfile", dest="outfile", help="Defines output path and filename", required=False +) +P.add_argument("--outpath", dest="outpath", help="Defines outpath only", required=False) +P.add_argument( + "--outfilename", + dest="outfilename", + help="Defines out filename only", + required=False, +) +P.add_argument( + "--start", dest="start", help="Defines start year and month", required=False +) +P.add_argument("--end", dest="end", help="Defines end year and month", required=False) + +args = P.get_parameter() + +infile_template = args.infile +outfile_template = args.outfile +outpath_template = args.outpath +outfilename_template = args.outfilename +varlist = args.vars +start = args.start +end = args.end + +print("start and end are ", start, " ", end) +print("variable list: ", varlist) + +InFile = StringConstructor(infile_template) +OutFile = StringConstructor(outfile_template) +OutFileName = StringConstructor(outfilename_template) +OutPath = StringConstructor(outpath_template) + +for var in varlist: + # Build filenames + InFile.variable = var + OutFile.variable = var + OutFileName.variable = var + OutPath.variable = var + infile = InFile() + outfile = OutFile() + outfilename = OutFileName() + outpath = OutPath() + + # calculate climatologies for this variable + clim_calc(var, infile, outfile, outpath, outfilename, start, end) From a1d152d104e0a80df05eb80e50b67f11b2c851f2 Mon Sep 17 00:00:00 2001 From: gleckler1 Date: Wed, 17 Aug 2022 17:51:27 -0700 Subject: [PATCH 002/130] working on xcdat fcn --- .../pcmdi_compute_climatologies-xcdat.py | 85 ++++++++++++++++++- .../scripts/pcmdi_compute_climatologies.py | 11 +-- 2 files changed, 88 insertions(+), 8 deletions(-) diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py index f17bb030d..3383418dc 100644 --- a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py +++ b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py @@ -14,6 +14,89 @@ # +def clim_calc_x(var, infile, outfile, outpath, outfilename, start, end): + import datetime + import os + import xcdat + import xarray + + ver = datetime.datetime.now().strftime("v%Y%m%d") + print('time is ', ver) + + lf = infile + tmp = lf.split("/") + infilename = tmp[len(tmp) - 1] + print("infilename is ", infilename) + + d = xcdat.open_dataset(lf, data_var=var) + atts = d.attrs + outfd = outfile + outdir = 
os.path.dirname(outfd) + + print(type(d)) + print(atts) + print(outfd) + print(outdir) + print('done') + +# CONTROL OF OUTPUT DIRECTORY AND FILE + + print("outfd is ", outfd) +# print("outdir is ", outdir) + + seperate_clims = "y" + + c = d.time +# print(c) + +# DEFAULT CLIM - BASED ON ENTIRE TIME SERIES + if (start is None) and (end is None): + start_yr_str = str(int(c["time.year"][0])) + start_mo_str = str(int(c["time.month"][0])) + end_yr_str = str(int(c["time.year"][len(c) - 1])) + end_mo_str = str(int(c["time.month"][len(c) - 1])) + start_yr = int(start_yr_str) + start_mo = int(start_mo_str) + end_yr = int(end_yr_str) + end_mo = int(end_mo_str) + print(start_yr_str,start_mo_str,end_yr_str,end_mo_str) + +# USER DEFINED PERIOD + else: + start_mo = int(start.split("-")[1]) + start_yr = int(start.split("-")[0]) + end_mo = int(end.split("-")[1]) + end_yr = int(end.split("-")[0]) + start_yr_str = str(start_yr) + start_mo_str = str(start_mo) + end_yr_str = str(end_yr) + end_mo_str = str(end_mo) + + d = d.sel(time=slice(start_yr_str + '-' + start_mo_str, end_yr_str + '-' + end_mo_str)) +# print(d) + + print("start_yr_str is ", start_yr_str) + + if start_mo_str not in ["11", "12"]: + start_mo_str = "0" + start_mo_str + if end_mo_str not in ["11", "12"]: + end_mo_str = "0" + end_mo_str + + d_djf = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},).isel(time=slice(0, 1)) + d_mam = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},).isel(time=slice(1, 2)) + d_jja = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},).isel(time=slice(2, 3)) + d_son = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},).isel(time=slice(3, 4)) +# d_ac = d.temporal.climatology(var, freq="month", weighted=True) + + + print(d_son) +# print(d_ac) + + + + +##### + def clim_calc(var, infile, outfile, outdir, outfilename, start, end): import datetime @@ -211,4 +294,4 @@ def clim_calc(var, infile, outfile, outdir, outfilename, start, end): outpath = OutPath() # calculate climatologies for this variable - clim_calc(var, infile, outfile, outpath, outfilename, start, end) + clim_calc_x(var, infile, outfile, outpath, outfilename, start, end) diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py index f17bb030d..44d8ae3a2 100644 --- a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py +++ b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py @@ -163,12 +163,9 @@ def clim_calc(var, infile, outfile, outdir, outfilename, start, end): P.add_argument( - "--vars", dest="vars", help="List of variables", nargs="+", required=False -) + "--vars", dest="vars", help="List of variables", nargs="+", required=False) P.add_argument("--infile", dest="infile", help="Defines infile", required=False) -P.add_argument( - "--outfile", dest="outfile", help="Defines output path and filename", required=False -) +P.add_argument( "--outfile", dest="outfile", help="Defines output path and filename", required=False) P.add_argument("--outpath", dest="outpath", help="Defines outpath only", required=False) P.add_argument( "--outfilename", @@ -177,8 +174,8 @@ def clim_calc(var, infile, outfile, outdir, outfilename, start, end): required=False, ) P.add_argument( - "--start", dest="start", 
help="Defines start year and month", required=False -) + "--start", dest="start", help="Defines start year and month", required=False) + P.add_argument("--end", dest="end", help="Defines end year and month", required=False) args = P.get_parameter() From d7908b5c490dc6b6101a3eaa8d92bdd11090ff30 Mon Sep 17 00:00:00 2001 From: gleckler1 Date: Wed, 17 Aug 2022 18:01:42 -0700 Subject: [PATCH 003/130] added clim-xcdat to setup --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index e323ed341..ee107d35d 100755 --- a/setup.py +++ b/setup.py @@ -66,6 +66,7 @@ scripts = [ "pcmdi_metrics/pcmdi/scripts/mean_climate_driver.py", "pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py", + "pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py", "pcmdi_metrics/misc/scripts/parallelize_driver.py", "pcmdi_metrics/misc/scripts/get_pmp_data.py", "pcmdi_metrics/monsoon_wang/scripts/mpindex_compute.py", From 5429b4054b579b8ea942f57fd30e2ae5153bf4c6 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 24 Aug 2022 18:01:58 -0700 Subject: [PATCH 004/130] simplify seasonal clim calculation to avoid unnessasary repeating --- .../scripts/pcmdi_compute_climatologies-xcdat.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py index 3383418dc..66a76d981 100644 --- a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py +++ b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py @@ -82,19 +82,19 @@ def clim_calc_x(var, infile, outfile, outpath, outfilename, start, end): if end_mo_str not in ["11", "12"]: end_mo_str = "0" + end_mo_str - d_djf = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},).isel(time=slice(0, 1)) - d_mam = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},).isel(time=slice(1, 2)) - d_jja = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},).isel(time=slice(2, 3)) - d_son = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},).isel(time=slice(3, 4)) -# d_ac = d.temporal.climatology(var, freq="month", weighted=True) + d_clim = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},) + d_djf = d_clim[var][0] + d_mam = d_clim[var][1] + d_jja = d_clim[var][2] + d_son = d_clim[var][3] + + d_ac = d.temporal.climatology(var, freq="month", weighted=True) print(d_son) # print(d_ac) - - ##### From 8131a60e2e9c52ee80f031a4910ed368e1bf0209 Mon Sep 17 00:00:00 2001 From: gleckler1 Date: Thu, 25 Aug 2022 16:45:31 -0700 Subject: [PATCH 005/130] minor --- .../pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py index 3383418dc..acdab1259 100644 --- a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py +++ b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py @@ -86,11 +86,11 @@ def clim_calc_x(var, infile, outfile, outpath, outfilename, start, end): d_mam = d.temporal.climatology(var, freq="season", weighted=True, 
season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},).isel(time=slice(1, 2)) d_jja = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},).isel(time=slice(2, 3)) d_son = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},).isel(time=slice(3, 4)) -# d_ac = d.temporal.climatology(var, freq="month", weighted=True) + d_ac = d.temporal.climatology(var, freq="month", weighted=True) print(d_son) -# print(d_ac) + print(d_ac) From 469842d49674e6f14fdb9a2f9891c95b7876c7f8 Mon Sep 17 00:00:00 2001 From: gleckler1 Date: Thu, 25 Aug 2022 16:53:44 -0700 Subject: [PATCH 006/130] got JW edits --- .../pcmdi_compute_climatologies-xcdat.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py index 23dbf8055..935384c36 100644 --- a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py +++ b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py @@ -72,7 +72,7 @@ def clim_calc_x(var, infile, outfile, outpath, outfilename, start, end): end_yr_str = str(end_yr) end_mo_str = str(end_mo) - d = d.sel(time=slice(start_yr_str + '-' + start_mo_str, end_yr_str + '-' + end_mo_str)) +# d = d.sel(time=slice(start_yr_str + '-' + start_mo_str, end_yr_str + '-' + end_mo_str)) # print(d) print("start_yr_str is ", start_yr_str) @@ -82,22 +82,19 @@ def clim_calc_x(var, infile, outfile, outpath, outfilename, start, end): if end_mo_str not in ["11", "12"]: end_mo_str = "0" + end_mo_str -<<<<<<< HEAD - d_djf = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},).isel(time=slice(0, 1)) - d_mam = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},).isel(time=slice(1, 2)) - d_jja = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},).isel(time=slice(2, 3)) - d_son = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},).isel(time=slice(3, 4)) - d_ac = d.temporal.climatology(var, freq="month", weighted=True) -======= + d_clim = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},) ->>>>>>> 5429b4054b579b8ea942f57fd30e2ae5153bf4c6 + + print('above seasons') d_djf = d_clim[var][0] d_mam = d_clim[var][1] d_jja = d_clim[var][2] d_son = d_clim[var][3] - + print('below seasons') + d_ac = d.temporal.climatology(var, freq="month", weighted=True) + print('below ac') print(d_son) print(d_ac) From d873a789c928445737e0a0f43b7717c338360fb6 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Thu, 20 Oct 2022 16:37:44 -0700 Subject: [PATCH 007/130] remove old backup file for obs_info_dict json --- .../obs_info_dictionary.json.bak171117 | 571 ------------------ 1 file changed, 571 deletions(-) delete mode 100644 pcmdi_metrics/pcmdi/scripts/obs_info_dictionary.json.bak171117 diff --git a/pcmdi_metrics/pcmdi/scripts/obs_info_dictionary.json.bak171117 b/pcmdi_metrics/pcmdi/scripts/obs_info_dictionary.json.bak171117 deleted file mode 100644 index d6581e9c4..000000000 --- a/pcmdi_metrics/pcmdi/scripts/obs_info_dictionary.json.bak171117 +++ /dev/null @@ -1,571 +0,0 @@ -{ - "hfss": { - 
"ERAINT": { - "CMIP_CMOR_TABLE": "ERAINT", - "MD5sum": "f4ae4c9033830ef12a666fba2a6ba9b0", - "RefName": "ERAINT", - "RefTrackingDate": "Tue Jul 12 17:28:00 2011", - "filename": "hfss_ERAINT_000001-000012_ac.nc", - "period": "", - "shape": "(12, 121, 240)" - } - }, - "hur": { - "ERA40": { - "CMIP_CMOR_TABLE": "ERA40", - "MD5sum": "968e4b2340c6d189a9a076c665ce3c5e", - "RefName": "ERA40", - "RefTrackingDate": "Tue Jul 12 17:50:28 2011", - "filename": "hur_ERA40_000001-000012_ac.nc", - "period": "", - "shape": "(12, 23, 73, 144)" - }, - "ERAINT": { - "CMIP_CMOR_TABLE": "ERAINT", - "MD5sum": "f5ad752af20fdf03c9c492a54deb9b36", - "RefName": "ERAINT", - "RefTrackingDate": "Tue Jul 12 17:48:17 2011", - "filename": "hur_ERAINT_000001-000012_ac.nc", - "period": "", - "shape": "(12, 17, 121, 240)" - } - }, - "hus": { - "ERA40": { - "CMIP_CMOR_TABLE": "ERA40", - "MD5sum": "bedc62255919424d66356c98d457e095", - "RefName": "ERA40", - "RefTrackingDate": "Wed Jul 13 10:11:17 2011", - "filename": "hus_ERA40_000001-000012_ac.nc", - "period": "", - "shape": "(12, 23, 73, 144)" - }, - "ERAINT": { - "CMIP_CMOR_TABLE": "ERAINT", - "MD5sum": "b4ff5a481e368b75170487cb2fca3f5b", - "RefName": "ERAINT", - "RefTrackingDate": "Wed Jul 13 10:08:12 2011", - "filename": "hus_ERAINT_000001-000012_ac.nc", - "period": "", - "shape": "(12, 17, 121, 240)" - }, - "JRA25": { - "CMIP_CMOR_TABLE": "JRA25", - "MD5sum": "bcd341cb2b19bf53eecac8ed4497d3a8", - "RefName": "JRA25", - "RefTrackingDate": "Wed Jul 13 10:06:19 2011", - "filename": "hus_JRA25_000001-000012_ac.nc", - "period": "", - "shape": "(12, 12, 145, 288)" - }, - "alternate1": "ERA40", - "alternate2": "JRA25", - "default": "ERAINT" - }, - "huss": { - "JRA25": { - "CMIP_CMOR_TABLE": "JRA25", - "MD5sum": "6a4a8d5516fc4e13ef10443d0b3d5909", - "RefName": "JRA25", - "RefTrackingDate": "Tue Jul 12 18:10:35 2011", - "filename": "huss_JRA25_000001-000012_ac.nc", - "period": "", - "shape": "(12, 145, 288)" - } - }, - "pr": { - "GPCP": { - "CMIP_CMOR_TABLE": "GPCP", - "MD5sum": "144d2807b833ced066db4956014c9472", - "RefName": "GPCP", - "RefTrackingDate": "Tue Jul 12 17:28:09 2011", - "filename": "pr_GPCP_000001-000012_ac.nc", - "period": "", - "shape": "(12, 72, 144)" - }, - "default": "GPCP" - }, - "prw": { - "RSS": { - "CMIP_CMOR_TABLE": "RSS", - "MD5sum": "76cd666442e53ddca1d000a3d390b00e", - "RefName": "RSS", - "RefTrackingDate": "Tue Jul 12 18:10:43 2011", - "filename": "prw_RSS_000001-000012_ac.nc", - "period": "", - "shape": "(12, 72, 144)" - }, - "default": "RSS" - }, - "psl": { - "ERA40": { - "CMIP_CMOR_TABLE": "ERA40", - "MD5sum": "e8564f7b1f228da6eef1ce5dfa98dec7", - "RefName": "ERA40", - "RefTrackingDate": "Tue Jul 12 17:28:25 2011", - "filename": "psl_ERA40_000001-000012_ac.nc", - "period": "", - "shape": "(12, 73, 144)" - }, - "ERAINT": { - "CMIP_CMOR_TABLE": "ERAINT", - "MD5sum": "ecc0ac7ded5ef4a42a9733d57783e3bd", - "RefName": "ERAINT", - "RefTrackingDate": "Tue Jul 12 17:28:38 2011", - "filename": "psl_ERAINT_000001-000012_ac.nc", - "period": "", - "shape": "(12, 121, 240)" - }, - "alternate1": "ERA40", - "default": "ERAINT" - }, - "rlds": { - "CERES": { - "CMIP_CMOR_TABLE": "CERES", - "MD5sum": "e1f5aa61b4501d005731ac6656372b39", - "RefName": "CERES", - "RefTrackingDate": "Wed Jun 1 14:02:18 2016", - "filename": "rlds_CERES_000001-000012_ac.nc", - "period": "", - "shape": "(12, 180, 360)" - }, - "default": "CERES" - }, - "rltcre": { - "CERES": { - "CMIP_CMOR_TABLE": "CERES", - "MD5sum": "07cb1fbed2fdc51ffafc689b35ac05ce", - "RefName": "CERES", - 
"RefTrackingDate": "Wed Jun 1 16:30:44 2016", - "filename": "rltcre_CERES_000001-000012_ac.nc", - "period": "", - "shape": "(12, 180, 360)" - }, - "default": "CERES" - }, - "rlus": { - "CERES": { - "CMIP_CMOR_TABLE": "CERES", - "MD5sum": "741ac684af3ad1e190ca14edf36663d0", - "RefName": "CERES", - "RefTrackingDate": "Wed Jun 1 14:02:18 2016", - "filename": "rlus_CERES_000001-000012_ac.nc", - "period": "", - "shape": "(12, 180, 360)" - }, - "default": "CERES" - }, - "rlut": { - "CERES": { - "CMIP_CMOR_TABLE": "CERES", - "MD5sum": "146eec5e8a734fedbbdd0909921530f2", - "RefName": "CERES", - "RefTrackingDate": "Wed Jun 1 16:30:44 2016", - "filename": "rlut_CERES_000001-000012_ac.nc", - "period": "", - "shape": "(12, 180, 360)" - }, - "default": "CERES" - }, - "rlutcs": { - "CERES": { - "CMIP_CMOR_TABLE": "CERES", - "MD5sum": "7e77061fb5d5399ecad4360779ceb63f", - "RefName": "CERES", - "RefTrackingDate": "Wed Jun 1 16:30:44 2016", - "filename": "rlutcs_CERES_000001-000012_ac.nc", - "period": "", - "shape": "(12, 180, 360)" - }, - "default": "CERES" - }, - "rsds": { - "CERES": { - "CMIP_CMOR_TABLE": "CERES", - "MD5sum": "f5e81fc625b1bce448020cbeb79b1fd1", - "RefName": "CERES", - "RefTrackingDate": "Wed Jun 1 14:02:17 2016", - "filename": "rsds_CERES_000001-000012_ac.nc", - "period": "", - "shape": "(12, 180, 360)" - }, - "default": "CERES" - }, - "rsdscs": { - "CERES": { - "CMIP_CMOR_TABLE": "CERES", - "MD5sum": "8255a25979cacfae6561b31658f49f43", - "RefName": "CERES", - "RefTrackingDate": "Wed Jun 1 14:02:18 2016", - "filename": "rsdscs_CERES_000001-000012_ac.nc", - "period": "", - "shape": "(12, 180, 360)" - }, - "default": "CERES" - }, - "rsdt": { - "CERES": { - "CMIP_CMOR_TABLE": "CERES", - "MD5sum": "033c05ea6447ec2b3be88016014ac2ef", - "RefName": "CERES", - "RefTrackingDate": "Wed Jun 1 14:32:40 2016", - "filename": "rsdt_CERES_000001-000012_ac.nc", - "period": "", - "shape": "(12, 180, 360)" - }, - "default": "CERES" - }, - "rst": { - "CERES": { - "CMIP_CMOR_TABLE": "CERES", - "MD5sum": "45ee2704bd9013c4f6589cbcccb4f893", - "RefName": "CERES", - "RefTrackingDate": "Wed Jun 1 16:30:44 2016", - "filename": "rst_CERES_000001-000012_ac.nc", - "period": "", - "shape": "(12, 180, 360)" - }, - "default": "CERES" - }, - "rstcre": { - "CERES": { - "CMIP_CMOR_TABLE": "CERES", - "MD5sum": "43ae9b34119b1d885a53e087720c1a72", - "RefName": "CERES", - "RefTrackingDate": "Wed Jun 1 16:30:43 2016", - "filename": "rstcre_CERES_000001-000012_ac.nc", - "period": "", - "shape": "(12, 180, 360)" - }, - "default": "CERES" - }, - "rsuscs": { - "CERES": { - "CMIP_CMOR_TABLE": "CERES", - "MD5sum": "822e5dd388c12b2a520bce79a34b765c", - "RefName": "CERES", - "RefTrackingDate": "Wed Jun 1 14:02:18 2016", - "filename": "rsuscs_CERES_000001-000012_ac.nc", - "period": "", - "shape": "(12, 180, 360)" - } - }, - "rsutcs": { - "CERES": { - "CMIP_CMOR_TABLE": "CERES", - "MD5sum": "e77f527db0bf303ec35e4df7e2c662fb", - "RefName": "CERES", - "RefTrackingDate": "Wed Jun 1 16:30:45 2016", - "filename": "rsutcs_CERES_000001-000012_ac.nc", - "period": "", - "shape": "(12, 180, 360)" - }, - "default": "CERES" - }, - "rtnet": { - "CERES": { - "CMIP_CMOR_TABLE": "CERES", - "MD5sum": "b3b28ee6496fe87a79dad308fc935f78", - "RefName": "CERES", - "RefTrackingDate": "Wed Jun 1 16:30:44 2016", - "filename": "rtnet_CERES_000001-000012_ac.nc", - "period": "", - "shape": "(12, 180, 360)" - } - }, - "rtnetcre": { - "CERES": { - "CMIP_CMOR_TABLE": "CERES", - "MD5sum": "d9d8973d589548124049d527331d5799", - "RefName": "CERES", - "RefTrackingDate": 
"Wed Jun 1 16:30:43 2016", - "filename": "rtnetcre_CERES_000001-000012_ac.nc", - "period": "", - "shape": "(12, 180, 360)" - } - }, - "sftlf": { - "ERA40": { - "CMIP_CMOR_TABLE": "fx", - "MD5sum": "b1e262027dba038c89eb51e6d7c4be17", - "filename": "sftlf_pcmdi-metrics_fx_ECMWF-ERA40_195709-200208.nc", - "shape": "(73, 144)" - }, - "ERAINT": { - "CMIP_CMOR_TABLE": "fx", - "MD5sum": "ba6582ba6c87f00d37c09843602e74e9", - "filename": "sftlf_pcmdi-metrics_fx_ECMWF-ERAInterim_197901-201407.nc", - "shape": "(121, 240)" - }, - "JRA25": { - "CMIP_CMOR_TABLE": "fx", - "MD5sum": "47530ba8fd46e66656bd52b29d19f41e", - "filename": "sftlf_pcmdi-metrics_fx_NCAR-JRA25_197901-201401.nc", - "shape": "(145, 288)" - }, - "UKMETOFFICE-HadISST-v1-1": { - "CMIP_CMOR_TABLE": "fx", - "filename": "sftlf_pcmdi-metrics_fx_UKMETOFFICE-HadISST-v1-1_198002-200501-clim.nc", - "shape": "(180, 360)" - } - }, - "ta": { - "ERA40": { - "CMIP_CMOR_TABLE": "ERA40", - "MD5sum": "6b3e6c1cb5c26e76140d2580ef80def0", - "RefName": "ERA40", - "RefTrackingDate": "Tue Jul 12 18:08:51 2011", - "filename": "ta_ERA40_000001-000012_ac.nc", - "period": "", - "shape": "(12, 23, 73, 144)" - }, - "ERAINT": { - "CMIP_CMOR_TABLE": "ERAINT", - "MD5sum": "9036381da7020e5edff322435d1e0dba", - "RefName": "ERAINT", - "RefTrackingDate": "Tue Jul 12 18:06:41 2011", - "filename": "ta_ERAINT_000001-000012_ac.nc", - "period": "", - "shape": "(12, 17, 121, 240)" - }, - "JRA25": { - "CMIP_CMOR_TABLE": "JRA25", - "MD5sum": "38835cb0b12bbd780ff783ff6b8a18fc", - "RefName": "JRA25", - "RefTrackingDate": "Tue Jul 12 18:05:04 2011", - "filename": "ta_JRA25_000001-000012_ac.nc", - "period": "", - "shape": "(12, 23, 145, 288)" - }, - "alternate1": "ERA40", - "alternate2": "JRA25", - "default": "ERAINT" - }, - "tas": { - "ERA40": { - "CMIP_CMOR_TABLE": "ERA40", - "MD5sum": "cd563e1c34f20b6aefc5c849cf4d153a", - "RefName": "ERA40", - "RefTrackingDate": "Tue Jul 12 17:45:41 2011", - "filename": "tas_ERA40_000001-000012_ac.nc", - "period": "", - "shape": "(12, 73, 144)" - }, - "ERAINT": { - "CMIP_CMOR_TABLE": "ERAINT", - "MD5sum": "b53a23756718590f89a26064c76c6b05", - "RefName": "ERAINT", - "RefTrackingDate": "Tue Jul 12 17:46:20 2011", - "filename": "tas_ERAINT_000001-000012_ac.nc", - "period": "", - "shape": "(12, 121, 240)" - }, - "JRA25": { - "CMIP_CMOR_TABLE": "JRA25", - "MD5sum": "77cc9aab998c684e75f1ccea24f42225", - "RefName": "JRA25", - "RefTrackingDate": "Tue Jul 12 17:46:07 2011", - "filename": "tas_JRA25_000001-000012_ac.nc", - "period": "", - "shape": "(12, 145, 288)" - }, - "alternate1": "ERA40", - "alternate2": "JRA25", - "default": "ERAINT" - }, - "tauu": { - "ERAINT": { - "CMIP_CMOR_TABLE": "ERAINT", - "MD5sum": "6fd1193a64e63da8d5f2fde9de303cbc", - "RefName": "ERAINT", - "RefTrackingDate": "Tue Jul 12 18:10:04 2011", - "filename": "tauu_ERAINT_000001-000012_ac.nc", - "period": "", - "shape": "(12, 121, 240)" - }, - "default": "ERAINT" - }, - "tauv": { - "ERAINT": { - "CMIP_CMOR_TABLE": "ERAINT", - "MD5sum": "98def159da59eadb06b5cbf7a7af2946", - "RefName": "ERAINT", - "RefTrackingDate": "Tue Jul 12 17:46:33 2011", - "filename": "tauv_ERAINT_000001-000012_ac.nc", - "period": "", - "shape": "(12, 121, 240)" - }, - "default": "ERAINT" - }, - "ts": { - "ERAINT": { - "CMIP_CMOR_TABLE": "ERAINT", - "MD5sum": "941ea2f75d02aa92dc3a58343063af21", - "RefName": "ERAINT", - "RefTrackingDate": "Tue Jul 12 17:27:47 2011", - "filename": "ts_ERAINT_000001-000012_ac.nc", - "period": "", - "shape": "(12, 121, 240)" - } - }, - "ua": { - "ERA40": { - "CMIP_CMOR_TABLE": 
"ERA40", - "MD5sum": "52fe5c4c5059a54597d3e72986c751d9", - "RefName": "ERA40", - "RefTrackingDate": "Tue Jul 12 18:27:51 2011", - "filename": "ua_ERA40_000001-000012_ac.nc", - "period": "", - "shape": "(12, 23, 73, 144)" - }, - "ERAINT": { - "CMIP_CMOR_TABLE": "ERAINT", - "MD5sum": "dca2dc9e8d2650a93cc8400775964cad", - "RefName": "ERAINT", - "RefTrackingDate": "Tue Jul 12 18:12:29 2011", - "filename": "ua_ERAINT_000001-000012_ac.nc", - "period": "", - "shape": "(12, 17, 121, 240)" - }, - "JRA25": { - "CMIP_CMOR_TABLE": "JRA25", - "MD5sum": "8e69acc4c20ad87716b70e440ef008f9", - "RefName": "JRA25", - "RefTrackingDate": "Tue Jul 12 18:25:45 2011", - "filename": "ua_JRA25_000001-000012_ac.nc", - "period": "", - "shape": "(12, 23, 145, 288)" - }, - "alternate1": "ERA40", - "alternate2": "JRA25", - "default": "ERAINT" - }, - "uas": { - "ERA40": { - "CMIP_CMOR_TABLE": "ERA40", - "MD5sum": "50ad1dc285fe61dbf398e4640a119c7d", - "RefName": "ERA40", - "RefTrackingDate": "Tue Jul 12 18:09:35 2011", - "filename": "uas_ERA40_000001-000012_ac.nc", - "period": "", - "shape": "(12, 73, 144)" - }, - "ERAINT": { - "CMIP_CMOR_TABLE": "ERAINT", - "MD5sum": "c4085df6de0ac8ad760a7b02f77e6083", - "RefName": "ERAINT", - "RefTrackingDate": "Tue Jul 12 18:09:50 2011", - "filename": "uas_ERAINT_000001-000012_ac.nc", - "period": "", - "shape": "(12, 121, 240)" - }, - "JRA25": { - "CMIP_CMOR_TABLE": "JRA25", - "MD5sum": "f6918d6e96a42993fd7657efad64fa22", - "RefName": "JRA25", - "RefTrackingDate": "Tue Jul 12 18:09:19 2011", - "filename": "uas_JRA25_000001-000012_ac.nc", - "period": "", - "shape": "(12, 145, 288)" - }, - "alternate1": "ERA40", - "alternate2": "JRA25", - "default": "ERAINT" - }, - "va": { - "ERA40": { - "CMIP_CMOR_TABLE": "ERA40", - "MD5sum": "7adc89da5ad6031415d5aef9407990e4", - "RefName": "ERA40", - "RefTrackingDate": "Tue Jul 12 17:13:02 2011", - "filename": "va_ERA40_000001-000012_ac.nc", - "period": "", - "shape": "(12, 23, 73, 144)" - }, - "ERAINT": { - "CMIP_CMOR_TABLE": "ERAINT", - "MD5sum": "c99d076d1ce415ee07b510c3f4983a9f", - "RefName": "ERAINT", - "RefTrackingDate": "Tue Jul 12 17:27:33 2011", - "filename": "va_ERAINT_000001-000012_ac.nc", - "period": "", - "shape": "(12, 17, 121, 240)" - }, - "JRA25": { - "CMIP_CMOR_TABLE": "JRA25", - "MD5sum": "2781c58ae5d22de6415b8bbbfecf2c83", - "RefName": "JRA25", - "RefTrackingDate": "Tue Jul 12 17:25:52 2011", - "filename": "va_JRA25_000001-000012_ac.nc", - "period": "", - "shape": "(12, 23, 145, 288)" - }, - "alternate1": "ERA40", - "alternate2": "JRA25", - "default": "ERAINT" - }, - "vas": { - "ERA40": { - "CMIP_CMOR_TABLE": "ERA40", - "MD5sum": "005f37ca8a51abd3e5ba63ec59a257fa", - "RefName": "ERA40", - "RefTrackingDate": "Tue Jul 12 18:28:55 2011", - "filename": "vas_ERA40_000001-000012_ac.nc", - "period": "", - "shape": "(12, 73, 144)" - }, - "ERAINT": { - "CMIP_CMOR_TABLE": "ERAINT", - "MD5sum": "4176326c9c9c710d9cf55011d5700441", - "RefName": "ERAINT", - "RefTrackingDate": "Tue Jul 12 18:28:15 2011", - "filename": "vas_ERAINT_000001-000012_ac.nc", - "period": "", - "shape": "(12, 121, 240)" - }, - "JRA25": { - "CMIP_CMOR_TABLE": "JRA25", - "MD5sum": "8755e78f04b0ae406a987d233262a01c", - "RefName": "JRA25", - "RefTrackingDate": "Tue Jul 12 18:28:40 2011", - "filename": "vas_JRA25_000001-000012_ac.nc", - "period": "", - "shape": "(12, 145, 288)" - }, - "alternate1": "ERA40", - "alternate2": "JRA25", - "default": "ERAINT" - }, - "zg": { - "ERA40": { - "CMIP_CMOR_TABLE": "ERA40", - "MD5sum": "c84cf70c32fec73b40717dfeb5eaacd0", - "RefName": "ERA40", 
- "RefTrackingDate": "Tue Jul 12 17:45:24 2011", - "filename": "zg_ERA40_000001-000012_ac.nc", - "period": "", - "shape": "(12, 23, 73, 144)" - }, - "ERAINT": { - "CMIP_CMOR_TABLE": "ERAINT", - "MD5sum": "92728f47ce4e30701b73fdcc49a203b9", - "RefName": "ERAINT", - "RefTrackingDate": "Tue Jul 12 17:43:09 2011", - "filename": "zg_ERAINT_000001-000012_ac.nc", - "period": "", - "shape": "(12, 17, 121, 240)" - }, - "JRA25": { - "CMIP_CMOR_TABLE": "JRA25", - "MD5sum": "d05fe901617979ab0f54681ea181adbb", - "RefName": "JRA25", - "RefTrackingDate": "Tue Jul 12 17:41:32 2011", - "filename": "zg_JRA25_000001-000012_ac.nc", - "period": "", - "shape": "(12, 23, 145, 288)" - }, - "alternate1": "ERA40", - "alternate2": "JRA25", - "default": "ERAINT" - } -} From d23c66f951d8d04da2a6874534b699a69473e2f0 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Thu, 20 Oct 2022 17:25:56 -0700 Subject: [PATCH 008/130] clean up --- .../pcmdi_compute_climatologies-xcdat.py | 338 ++++++------------ 1 file changed, 101 insertions(+), 237 deletions(-) diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py index 935384c36..aca4f40c1 100644 --- a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py +++ b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py @@ -1,56 +1,43 @@ #!/usr/bin/env python import datetime - -import cdms2 from genutil import StringConstructor - +import os +import xcdat import pcmdi_metrics -ver = datetime.datetime.now().strftime("v%Y%m%d") - -cdms2.setNetcdfShuffleFlag(0) -cdms2.setNetcdfDeflateFlag(0) -cdms2.setNetcdfDeflateLevelFlag(0) -# - -def clim_calc_x(var, infile, outfile, outpath, outfilename, start, end): - import datetime - import os - import xcdat - import xarray +def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start=None, end=None): ver = datetime.datetime.now().strftime("v%Y%m%d") print('time is ', ver) - lf = infile - tmp = lf.split("/") + tmp = infile.split("/") infilename = tmp[len(tmp) - 1] print("infilename is ", infilename) - d = xcdat.open_dataset(lf, data_var=var) + d = xcdat.open_dataset(infile, data_var=var) atts = d.attrs - outfd = outfile - outdir = os.path.dirname(outfd) - + print(type(d)) print(atts) - print(outfd) - print(outdir) - print('done') -# CONTROL OF OUTPUT DIRECTORY AND FILE + # CONTROL OF OUTPUT DIRECTORY AND FILE + if outpath is None: + outdir = os.path.dirname(outfile) + else: + outdir = outpath + os.makedirs(outdir, exist_ok=True) - print("outfd is ", outfd) -# print("outdir is ", outdir) + print("outdir is ", outdir) seperate_clims = "y" c = d.time -# print(c) + # print(c) -# DEFAULT CLIM - BASED ON ENTIRE TIME SERIES + # CLIM PERIOD if (start is None) and (end is None): + # DEFAULT CLIM - BASED ON ENTIRE TIME SERIES start_yr_str = str(int(c["time.year"][0])) start_mo_str = str(int(c["time.month"][0])) end_yr_str = str(int(c["time.year"][len(c) - 1])) @@ -59,10 +46,9 @@ def clim_calc_x(var, infile, outfile, outpath, outfilename, start, end): start_mo = int(start_mo_str) end_yr = int(end_yr_str) end_mo = int(end_mo_str) - print(start_yr_str,start_mo_str,end_yr_str,end_mo_str) - -# USER DEFINED PERIOD + print(start_yr_str, start_mo_str, end_yr_str, end_mo_str) else: + # USER DEFINED PERIOD start_mo = int(start.split("-")[1]) start_yr = int(start.split("-")[0]) end_mo = int(end.split("-")[1]) @@ -72,122 +58,33 @@ def clim_calc_x(var, infile, outfile, outpath, outfilename, start, end): end_yr_str = str(end_yr) end_mo_str = 
str(end_mo) -# d = d.sel(time=slice(start_yr_str + '-' + start_mo_str, end_yr_str + '-' + end_mo_str)) -# print(d) + d = d.sel(time=slice(start_yr_str + '-' + start_mo_str + '-01', + end_yr_str + '-' + end_mo_str + '-31')) + # print(d) print("start_yr_str is ", start_yr_str) - - if start_mo_str not in ["11", "12"]: - start_mo_str = "0" + start_mo_str - if end_mo_str not in ["11", "12"]: - end_mo_str = "0" + end_mo_str - + print("start_mo_str is ", start_mo_str) + print("end_yr_str is ", end_yr_str) + print("end_mo_str is ", end_mo_str) + + start_mo_str = start_mo_str.zfill(2) + end_mo_str = end_mo_str.zfill(2) d_clim = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},) - - print('above seasons') - - d_djf = d_clim[var][0] - d_mam = d_clim[var][1] - d_jja = d_clim[var][2] - d_son = d_clim[var][3] - print('below seasons') + + d_clim_dict = dict() + + d_clim_dict['DJF'] = d_clim.isel(time=0) + d_clim_dict['MAM'] = d_clim.isel(time=1) + d_clim_dict['JJA'] = d_clim.isel(time=2) + d_clim_dict['SON'] = d_clim.isel(time=3) d_ac = d.temporal.climatology(var, freq="month", weighted=True) print('below ac') - - print(d_son) - print(d_ac) - - -##### - - -def clim_calc(var, infile, outfile, outdir, outfilename, start, end): - import datetime - import os - - import cdms2 - import cdtime - import cdutil - - ver = datetime.datetime.now().strftime("v%Y%m%d") - - lf = infile - tmp = lf.split("/") - infilename = tmp[len(tmp) - 1] - print("infilename is ", infilename) - - f = cdms2.open(lf) - atts = f.listglobal() - outfd = outfile - - # CONTROL OF OUTPUT DIRECTORY AND FILE - - # outdir AND outfilename PROVIDED BY USER - if outdir is not None and outfilename is not None: - outfd = outdir + outfilename - - # outdir PROVIDED BY USER, BUT filename IS TAKEN FROM infilename WITH CLIM MODIFICATIONS SUFFIX ADDED BELOW - if outdir is not None and outfilename is None: - outfd = outdir + "/" + infilename - - if outdir is None and outfilename is None: - outfd = outfile - - print("outfd is ", outfd) - print("outdir is ", outdir) - - seperate_clims = "y" - - # DEFAULT CLIM - BASED ON ENTIRE TIME SERIES - if (start is None) and (end is None): - d = f(var) - t = d.getTime() - c = t.asComponentTime() - start_yr_str = str(c[0].year) - start_mo_str = str(c[0].month) - end_yr_str = str(c[len(c) - 1].year) - end_mo_str = str(c[len(c) - 1].month) - start_yr = int(start_yr_str) - start_mo = int(start_mo_str) - end_yr = int(end_yr_str) - end_mo = int(end_mo_str) - - # USER DEFINED PERIOD - else: - start_mo = int(start.split("-")[1]) - start_yr = int(start.split("-")[0]) - end_mo = int(end.split("-")[1]) - end_yr = int(end.split("-")[0]) - start_yr_str = str(start_yr) - start_mo_str = str(start_mo) - end_yr_str = str(end_yr) - end_mo_str = str(end_mo) - - d = f( - var, time=(cdtime.comptime(start_yr, start_mo), cdtime.comptime(end_yr, end_mo)) - ) - - print("start_yr_str is ", start_yr_str) - - if start_mo_str not in ["11", "12"]: - start_mo_str = "0" + start_mo_str - if end_mo_str not in ["11", "12"]: - end_mo_str = "0" + end_mo_str - - d_ac = cdutil.ANNUALCYCLE.climatology(d).astype("float32") - d_djf = cdutil.DJF.climatology(d)(squeeze=1).astype("float32") - d_jja = cdutil.JJA.climatology(d)(squeeze=1).astype("float32") - d_son = cdutil.SON.climatology(d)(squeeze=1).astype("float32") - d_mam = cdutil.MAM.climatology(d)(squeeze=1).astype("float32") - - for v in [d_ac, d_djf, d_jja, d_son, d_mam]: - - v.id = var - + + d_clim_dict['AC'] = d_ac + for s in ["AC", 
"DJF", "MAM", "JJA", "SON"]: - addf = ( "." + start_yr_str @@ -201,102 +98,69 @@ def clim_calc(var, infile, outfile, outdir, outfilename, start, end): + ver + ".nc" ) + print("outfd is ", outfd) + out = out.replace(".nc", addf) + + if outfilename is not None: + out = os.path.join(outdir, outfilename) + + print("out is ", out) + d_clim_dict[s].to_netcdf(out)) + + +if __name__ == "__main__": + + ver = datetime.datetime.now().strftime("v%Y%m%d") - if seperate_clims == "y": - print("outfd is ", outfd) - out = outfd - out = out.replace(".nc", addf) - out = out.replace(".xml", addf) - print("out is ", out) - - if seperate_clims == "n": - out = outfd.replace("climo.nc", s + ".nc") - if s == "AC": - do = d_ac - if s == "DJF": - do = d_djf - if s == "MAM": - do = d_mam - if s == "JJA": - do = d_jja - if s == "SON": - do = d_son - do.id = var - - # MKDIRS AS NEEDED - lst = outfd.split("/") - s = "/" - for ll in range(len(lst)): - d = s.join(lst[0 : ll + 1]) - try: - os.mkdir(d) - except OSError: - pass - - g = cdms2.open(out, "w+") - g.write(do) - - for att in atts: - setattr(g, att, f.getglobal(att)) - g.close() - print(do.shape, " ", d_ac.shape, " ", out) - f.close() - return - - -####################################################################### - - -P = pcmdi_metrics.driver.pmp_parser.PMPMetricsParser() - - -P.add_argument( - "--vars", dest="vars", help="List of variables", nargs="+", required=False -) -P.add_argument("--infile", dest="infile", help="Defines infile", required=False) -P.add_argument( - "--outfile", dest="outfile", help="Defines output path and filename", required=False -) -P.add_argument("--outpath", dest="outpath", help="Defines outpath only", required=False) -P.add_argument( - "--outfilename", - dest="outfilename", - help="Defines out filename only", - required=False, -) -P.add_argument( - "--start", dest="start", help="Defines start year and month", required=False -) -P.add_argument("--end", dest="end", help="Defines end year and month", required=False) - -args = P.get_parameter() - -infile_template = args.infile -outfile_template = args.outfile -outpath_template = args.outpath -outfilename_template = args.outfilename -varlist = args.vars -start = args.start -end = args.end - -print("start and end are ", start, " ", end) -print("variable list: ", varlist) - -InFile = StringConstructor(infile_template) -OutFile = StringConstructor(outfile_template) -OutFileName = StringConstructor(outfilename_template) -OutPath = StringConstructor(outpath_template) - -for var in varlist: - # Build filenames - InFile.variable = var - OutFile.variable = var - OutFileName.variable = var - OutPath.variable = var - infile = InFile() - outfile = OutFile() - outfilename = OutFileName() - outpath = OutPath() + P = pcmdi_metrics.driver.pmp_parser.PMPMetricsParser() - # calculate climatologies for this variable - clim_calc_x(var, infile, outfile, outpath, outfilename, start, end) + P.add_argument( + "--vars", dest="vars", help="List of variables", nargs="+", required=False + ) + P.add_argument("--infile", dest="infile", help="Defines infile", required=False) + P.add_argument( + "--outfile", dest="outfile", help="Defines output path and filename", required=False + ) + P.add_argument("--outpath", dest="outpath", help="Defines outpath only", required=False) + P.add_argument( + "--outfilename", + dest="outfilename", + help="Defines out filename only", + required=False, + ) + P.add_argument( + "--start", dest="start", help="Defines start year and month", required=False + ) + P.add_argument("--end", 
dest="end", help="Defines end year and month", required=False) + + args = P.get_parameter() + + infile_template = args.infile + outfile_template = args.outfile + outpath_template = args.outpath + outfilename_template = args.outfilename + varlist = args.vars + start = args.start + end = args.end + + print("start and end are ", start, " ", end) + print("variable list: ", varlist) + + InFile = StringConstructor(infile_template) + OutFile = StringConstructor(outfile_template) + OutFileName = StringConstructor(outfilename_template) + OutPath = StringConstructor(outpath_template) + + for var in varlist: + # Build filenames + InFile.variable = var + OutFile.variable = var + OutFileName.variable = var + OutPath.variable = var + infile = InFile() + outfile = OutFile() + outfilename = OutFileName() + outpath = OutPath() + + # calculate climatologies for this variable + clim_calc_x(var, infile, outfile, outpath, outfilename, start, end) From f0d177917a816bf984d4f1fe5134b64a3852b40b Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 20 Oct 2022 18:32:14 -0700 Subject: [PATCH 009/130] add xcdat and temporary function that can open xml using xcdat --- conda-env/dev.yml | 2 + pcmdi_metrics/io/__init__.py | 1 + .../pcmdi_compute_climatologies-xcdat.py | 47 ++++++++++--------- 3 files changed, 29 insertions(+), 21 deletions(-) diff --git a/conda-env/dev.yml b/conda-env/dev.yml index aa8032b33..91c1e1fd1 100644 --- a/conda-env/dev.yml +++ b/conda-env/dev.yml @@ -19,6 +19,8 @@ dependencies: - eofs=1.4.0 - seaborn=0.11.1 - enso_metrics=1.1.1 + - xcdat=0.3.3 + - xmltodict=0.13.0 # Testing # ================== - pre_commit=2.15.0 diff --git a/pcmdi_metrics/io/__init__.py b/pcmdi_metrics/io/__init__.py index d52b46a38..fbf02bef0 100644 --- a/pcmdi_metrics/io/__init__.py +++ b/pcmdi_metrics/io/__init__.py @@ -1,3 +1,4 @@ # init for pcmdi_metrics.io from . 
import base # noqa from .base import MV2Json # noqa +from .xcdat_openxml import xcdat_open # noqa diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py index aca4f40c1..296389e23 100644 --- a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py +++ b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py @@ -1,8 +1,10 @@ #!/usr/bin/env python import datetime -from genutil import StringConstructor import os + import xcdat +from genutil import StringConstructor + import pcmdi_metrics @@ -17,11 +19,12 @@ def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start d = xcdat.open_dataset(infile, data_var=var) atts = d.attrs - + print(type(d)) print(atts) - # CONTROL OF OUTPUT DIRECTORY AND FILE + # CONTROL OF OUTPUT DIRECTORY AND FILE + out = outfile if outpath is None: outdir = os.path.dirname(outfile) else: @@ -30,16 +33,14 @@ def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start print("outdir is ", outdir) - seperate_clims = "y" - c = d.time # print(c) # CLIM PERIOD if (start is None) and (end is None): # DEFAULT CLIM - BASED ON ENTIRE TIME SERIES - start_yr_str = str(int(c["time.year"][0])) - start_mo_str = str(int(c["time.month"][0])) + start_yr_str = str(int(c["time.year"][0])) + start_mo_str = str(int(c["time.month"][0])) end_yr_str = str(int(c["time.year"][len(c) - 1])) end_mo_str = str(int(c["time.month"][len(c) - 1])) start_yr = int(start_yr_str) @@ -58,7 +59,7 @@ def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start end_yr_str = str(end_yr) end_mo_str = str(end_mo) - d = d.sel(time=slice(start_yr_str + '-' + start_mo_str + '-01', + d = d.sel(time=slice(start_yr_str + '-' + start_mo_str + '-01', end_yr_str + '-' + end_mo_str + '-31')) # print(d) @@ -66,24 +67,24 @@ def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start print("start_mo_str is ", start_mo_str) print("end_yr_str is ", end_yr_str) print("end_mo_str is ", end_mo_str) - + start_mo_str = start_mo_str.zfill(2) end_mo_str = end_mo_str.zfill(2) d_clim = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},) - + d_clim_dict = dict() - + d_clim_dict['DJF'] = d_clim.isel(time=0) d_clim_dict['MAM'] = d_clim.isel(time=1) d_clim_dict['JJA'] = d_clim.isel(time=2) d_clim_dict['SON'] = d_clim.isel(time=3) - - d_ac = d.temporal.climatology(var, freq="month", weighted=True) + + d_ac = d.temporal.climatology(var, freq="month", weighted=True) print('below ac') - + d_clim_dict['AC'] = d_ac - + for s in ["AC", "DJF", "MAM", "JJA", "SON"]: addf = ( "." 
@@ -98,18 +99,16 @@ def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start + ver + ".nc" ) - print("outfd is ", outfd) - out = out.replace(".nc", addf) - if outfilename is not None: out = os.path.join(outdir, outfilename) - + out = out.replace(".nc", addf) + print("out is ", out) - d_clim_dict[s].to_netcdf(out)) + d_clim_dict[s].to_netcdf(out) if __name__ == "__main__": - + ver = datetime.datetime.now().strftime("v%Y%m%d") P = pcmdi_metrics.driver.pmp_parser.PMPMetricsParser() @@ -162,5 +161,11 @@ def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start outfilename = OutFileName() outpath = OutPath() + print('var:', var) + print('infile:', infile) + print('outfile:', outfile) + print('outfilename:', outfilename) + print('outpath:', outpath) + # calculate climatologies for this variable clim_calc_x(var, infile, outfile, outpath, outfilename, start, end) From 396e631225350b7b4492bbe8aa2e400ce4575d20 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 20 Oct 2022 18:32:50 -0700 Subject: [PATCH 010/130] temporary function that can open xml using xcdat --- pcmdi_metrics/io/xcdat_openxml.py | 55 +++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 pcmdi_metrics/io/xcdat_openxml.py diff --git a/pcmdi_metrics/io/xcdat_openxml.py b/pcmdi_metrics/io/xcdat_openxml.py new file mode 100644 index 000000000..0eff8eba5 --- /dev/null +++ b/pcmdi_metrics/io/xcdat_openxml.py @@ -0,0 +1,55 @@ +import xmltodict +import glob +import os +import sys +import xcdat as xc + + +def xcdat_open(infile): + """ + Parameter + --------- + infile: + list of string, or string + File(s) to open using xcdat + Output + ------ + ds: + xcdat dataset + """ + if isinstance(infile, list): + ds = xcdat.open_mfdataset(infile) + else: + if infile.split('.')[-1].lower() == 'xml': + ds = xcdat_openxml(infile) + else: + ds = xcdat.open_dataset(infile) + + return ds + + +def xcdat_openxml(xmlfile): + """ + Parameter + --------- + infile: + xml file to open using xcdat + Output + ------ + ds: + xcdat dataset + """ + if not os.path.exists(xmlfile): + sys.exit('ERROR: File not exist: {}'.format(xmlfile)) + + with open(xmlfile) as fd: + doc = xmltodict.parse(fd.read()) + + ncfile_list = glob.glob(os.path.join(doc['dataset']['@directory'], '*.nc')) + + if len(ncfile_list) > 1: + ds = xc.open_mfdataset(ncfile_list) + else: + ds = xc.open_dataset(ncfile_list[0]) + + return ds From 8fd0d4f1b439a79ee0e2d9fee936c8867f1e1021 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 20 Oct 2022 21:17:29 -0700 Subject: [PATCH 011/130] use xcdat for climatology calculation --- pcmdi_metrics/io/xcdat_openxml.py | 35 +++++++++++-------- .../pcmdi_compute_climatologies-xcdat.py | 27 +++++++------- 2 files changed, 35 insertions(+), 27 deletions(-) diff --git a/pcmdi_metrics/io/xcdat_openxml.py b/pcmdi_metrics/io/xcdat_openxml.py index 0eff8eba5..c97a6786b 100644 --- a/pcmdi_metrics/io/xcdat_openxml.py +++ b/pcmdi_metrics/io/xcdat_openxml.py @@ -1,39 +1,46 @@ -import xmltodict import glob import os import sys -import xcdat as xc + +import xcdat +import xmltodict -def xcdat_open(infile): +def xcdat_open(infile, data_var=None): """ Parameter --------- - infile: + infile: list of string, or string File(s) to open using xcdat + data_var: + (Optional[str], optional) – The key of the non-bounds data variable to keep in the Dataset, alongside any existing bounds data variables, by default None. 
+ Output ------ ds: xcdat dataset - """ + """ if isinstance(infile, list): - ds = xcdat.open_mfdataset(infile) + ds = xcdat.open_mfdataset(infile, data_var=data_var) else: if infile.split('.')[-1].lower() == 'xml': - ds = xcdat_openxml(infile) + ds = xcdat_openxml(infile, data_var=data_var) else: - ds = xcdat.open_dataset(infile) + ds = xcdat.open_dataset(infile, data_var=data_var) return ds -def xcdat_openxml(xmlfile): +def xcdat_openxml(xmlfile, data_var=None): """ Parameter --------- - infile: + infile: xml file to open using xcdat + data_var: + (Optional[str], optional) – The key of the non-bounds data variable to keep in the Dataset, alongside any existing bounds data variables, by default None. + Output ------ ds: @@ -46,10 +53,10 @@ def xcdat_openxml(xmlfile): doc = xmltodict.parse(fd.read()) ncfile_list = glob.glob(os.path.join(doc['dataset']['@directory'], '*.nc')) - + if len(ncfile_list) > 1: - ds = xc.open_mfdataset(ncfile_list) + ds = xcdat.open_mfdataset(ncfile_list, data_var=data_var) else: - ds = xc.open_dataset(ncfile_list[0]) - + ds = xcdat.open_dataset(ncfile_list[0], data_var=data_var) + return ds diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py index 296389e23..e7870a452 100644 --- a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py +++ b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py @@ -2,10 +2,10 @@ import datetime import os -import xcdat from genutil import StringConstructor import pcmdi_metrics +from pcmdi_metrics.io import xcdat_open def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start=None, end=None): @@ -17,11 +17,12 @@ def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start infilename = tmp[len(tmp) - 1] print("infilename is ", infilename) - d = xcdat.open_dataset(infile, data_var=var) + # d = xcdat.open_dataset(infile, data_var=var) + d = xcdat_open(infile, data_var=var) atts = d.attrs - print(type(d)) - print(atts) + print('type(d):', type(d)) + print('atts:', atts) # CONTROL OF OUTPUT DIRECTORY AND FILE out = outfile @@ -40,9 +41,9 @@ def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start if (start is None) and (end is None): # DEFAULT CLIM - BASED ON ENTIRE TIME SERIES start_yr_str = str(int(c["time.year"][0])) - start_mo_str = str(int(c["time.month"][0])) + start_mo_str = str(int(c["time.month"][0]).zfill(2)) end_yr_str = str(int(c["time.year"][len(c) - 1])) - end_mo_str = str(int(c["time.month"][len(c) - 1])) + end_mo_str = str(int(c["time.month"][len(c) - 1]).zfill(2)) start_yr = int(start_yr_str) start_mo = int(start_mo_str) end_yr = int(end_yr_str) @@ -50,14 +51,14 @@ def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start print(start_yr_str, start_mo_str, end_yr_str, end_mo_str) else: # USER DEFINED PERIOD - start_mo = int(start.split("-")[1]) start_yr = int(start.split("-")[0]) - end_mo = int(end.split("-")[1]) + start_mo = int(start.split("-")[1]) end_yr = int(end.split("-")[0]) + end_mo = int(end.split("-")[1]) start_yr_str = str(start_yr) - start_mo_str = str(start_mo) + start_mo_str = str(start_mo).zfill(2) end_yr_str = str(end_yr) - end_mo_str = str(end_mo) + end_mo_str = str(end_mo).zfill(2) d = d.sel(time=slice(start_yr_str + '-' + start_mo_str + '-01', end_yr_str + '-' + end_mo_str + '-31')) @@ -101,10 +102,10 @@ def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start ) if outfilename is 
not None: out = os.path.join(outdir, outfilename) - out = out.replace(".nc", addf) + out_season = out.replace(".nc", addf) - print("out is ", out) - d_clim_dict[s].to_netcdf(out) + print("out_season is ", out_season) + d_clim_dict[s].to_netcdf(out_season) if __name__ == "__main__": From 26761a576fe97921a8148d0dbbedfa76aabcfc93 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 20 Oct 2022 22:30:28 -0700 Subject: [PATCH 012/130] allow dask generating large_chunks to silence large chunk warnings --- .../pcmdi_compute_climatologies-xcdat.py | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py index e7870a452..b092ba9cb 100644 --- a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py +++ b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py @@ -1,6 +1,7 @@ #!/usr/bin/env python import datetime import os +import dask from genutil import StringConstructor @@ -34,8 +35,7 @@ def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start print("outdir is ", outdir) - c = d.time - # print(c) + c = d.time # coordinate for time # CLIM PERIOD if (start is None) and (end is None): @@ -62,17 +62,16 @@ def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start d = d.sel(time=slice(start_yr_str + '-' + start_mo_str + '-01', end_yr_str + '-' + end_mo_str + '-31')) - # print(d) print("start_yr_str is ", start_yr_str) print("start_mo_str is ", start_mo_str) print("end_yr_str is ", end_yr_str) print("end_mo_str is ", end_mo_str) - start_mo_str = start_mo_str.zfill(2) - end_mo_str = end_mo_str.zfill(2) - + # Calculate climatology + dask.config.set(**{'array.slicing.split_large_chunks': True}) d_clim = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},) + d_ac = d.temporal.climatology(var, freq="month", weighted=True) d_clim_dict = dict() @@ -80,10 +79,6 @@ def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start d_clim_dict['MAM'] = d_clim.isel(time=1) d_clim_dict['JJA'] = d_clim.isel(time=2) d_clim_dict['SON'] = d_clim.isel(time=3) - - d_ac = d.temporal.climatology(var, freq="month", weighted=True) - print('below ac') - d_clim_dict['AC'] = d_ac for s in ["AC", "DJF", "MAM", "JJA", "SON"]: @@ -104,7 +99,7 @@ def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start out = os.path.join(outdir, outfilename) out_season = out.replace(".nc", addf) - print("out_season is ", out_season) + print("output file is", out_season) d_clim_dict[s].to_netcdf(out_season) From 21fb5860bfa6ca37ba6f0a91acaf240605272370 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Thu, 20 Oct 2022 22:44:31 -0700 Subject: [PATCH 013/130] remove cdms code from the part to generate annual cycle files; results compared between cdms and xcdat, and they are consistent --- .../pcmdi_compute_climatologies-xcdat.py | 167 ----------- .../scripts/pcmdi_compute_climatologies.py | 274 ++++++++---------- setup.py | 1 - 3 files changed, 115 insertions(+), 327 deletions(-) delete mode 100644 pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py deleted file mode 100644 index b092ba9cb..000000000 --- a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py +++ 
/dev/null @@ -1,167 +0,0 @@ -#!/usr/bin/env python -import datetime -import os -import dask - -from genutil import StringConstructor - -import pcmdi_metrics -from pcmdi_metrics.io import xcdat_open - - -def clim_calc_x(var, infile, outfile=None, outpath=None, outfilename=None, start=None, end=None): - - ver = datetime.datetime.now().strftime("v%Y%m%d") - print('time is ', ver) - - tmp = infile.split("/") - infilename = tmp[len(tmp) - 1] - print("infilename is ", infilename) - - # d = xcdat.open_dataset(infile, data_var=var) - d = xcdat_open(infile, data_var=var) - atts = d.attrs - - print('type(d):', type(d)) - print('atts:', atts) - - # CONTROL OF OUTPUT DIRECTORY AND FILE - out = outfile - if outpath is None: - outdir = os.path.dirname(outfile) - else: - outdir = outpath - os.makedirs(outdir, exist_ok=True) - - print("outdir is ", outdir) - - c = d.time # coordinate for time - - # CLIM PERIOD - if (start is None) and (end is None): - # DEFAULT CLIM - BASED ON ENTIRE TIME SERIES - start_yr_str = str(int(c["time.year"][0])) - start_mo_str = str(int(c["time.month"][0]).zfill(2)) - end_yr_str = str(int(c["time.year"][len(c) - 1])) - end_mo_str = str(int(c["time.month"][len(c) - 1]).zfill(2)) - start_yr = int(start_yr_str) - start_mo = int(start_mo_str) - end_yr = int(end_yr_str) - end_mo = int(end_mo_str) - print(start_yr_str, start_mo_str, end_yr_str, end_mo_str) - else: - # USER DEFINED PERIOD - start_yr = int(start.split("-")[0]) - start_mo = int(start.split("-")[1]) - end_yr = int(end.split("-")[0]) - end_mo = int(end.split("-")[1]) - start_yr_str = str(start_yr) - start_mo_str = str(start_mo).zfill(2) - end_yr_str = str(end_yr) - end_mo_str = str(end_mo).zfill(2) - - d = d.sel(time=slice(start_yr_str + '-' + start_mo_str + '-01', - end_yr_str + '-' + end_mo_str + '-31')) - - print("start_yr_str is ", start_yr_str) - print("start_mo_str is ", start_mo_str) - print("end_yr_str is ", end_yr_str) - print("end_mo_str is ", end_mo_str) - - # Calculate climatology - dask.config.set(**{'array.slicing.split_large_chunks': True}) - d_clim = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},) - d_ac = d.temporal.climatology(var, freq="month", weighted=True) - - d_clim_dict = dict() - - d_clim_dict['DJF'] = d_clim.isel(time=0) - d_clim_dict['MAM'] = d_clim.isel(time=1) - d_clim_dict['JJA'] = d_clim.isel(time=2) - d_clim_dict['SON'] = d_clim.isel(time=3) - d_clim_dict['AC'] = d_ac - - for s in ["AC", "DJF", "MAM", "JJA", "SON"]: - addf = ( - "." - + start_yr_str - + start_mo_str - + "-" - + end_yr_str - + end_mo_str - + "." - + s - + "." 
- + ver - + ".nc" - ) - if outfilename is not None: - out = os.path.join(outdir, outfilename) - out_season = out.replace(".nc", addf) - - print("output file is", out_season) - d_clim_dict[s].to_netcdf(out_season) - - -if __name__ == "__main__": - - ver = datetime.datetime.now().strftime("v%Y%m%d") - - P = pcmdi_metrics.driver.pmp_parser.PMPMetricsParser() - - P.add_argument( - "--vars", dest="vars", help="List of variables", nargs="+", required=False - ) - P.add_argument("--infile", dest="infile", help="Defines infile", required=False) - P.add_argument( - "--outfile", dest="outfile", help="Defines output path and filename", required=False - ) - P.add_argument("--outpath", dest="outpath", help="Defines outpath only", required=False) - P.add_argument( - "--outfilename", - dest="outfilename", - help="Defines out filename only", - required=False, - ) - P.add_argument( - "--start", dest="start", help="Defines start year and month", required=False - ) - P.add_argument("--end", dest="end", help="Defines end year and month", required=False) - - args = P.get_parameter() - - infile_template = args.infile - outfile_template = args.outfile - outpath_template = args.outpath - outfilename_template = args.outfilename - varlist = args.vars - start = args.start - end = args.end - - print("start and end are ", start, " ", end) - print("variable list: ", varlist) - - InFile = StringConstructor(infile_template) - OutFile = StringConstructor(outfile_template) - OutFileName = StringConstructor(outfilename_template) - OutPath = StringConstructor(outpath_template) - - for var in varlist: - # Build filenames - InFile.variable = var - OutFile.variable = var - OutFileName.variable = var - OutPath.variable = var - infile = InFile() - outfile = OutFile() - outfilename = OutFileName() - outpath = OutPath() - - print('var:', var) - print('infile:', infile) - print('outfile:', outfile) - print('outfilename:', outfilename) - print('outpath:', outpath) - - # calculate climatologies for this variable - clim_calc_x(var, infile, outfile, outpath, outfilename, start, end) diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py index 44d8ae3a2..9cd3c9ea0 100644 --- a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py +++ b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py @@ -1,105 +1,87 @@ #!/usr/bin/env python import datetime +import os +import dask -import cdms2 from genutil import StringConstructor import pcmdi_metrics +from pcmdi_metrics.io import xcdat_open -ver = datetime.datetime.now().strftime("v%Y%m%d") -cdms2.setNetcdfShuffleFlag(0) -cdms2.setNetcdfDeflateFlag(0) -cdms2.setNetcdfDeflateLevelFlag(0) - -# - - -def clim_calc(var, infile, outfile, outdir, outfilename, start, end): - import datetime - import os - - import cdms2 - import cdtime - import cdutil +def clim_calc(var, infile, outfile=None, outpath=None, outfilename=None, start=None, end=None): ver = datetime.datetime.now().strftime("v%Y%m%d") + print('time is ', ver) - lf = infile - tmp = lf.split("/") + tmp = infile.split("/") infilename = tmp[len(tmp) - 1] print("infilename is ", infilename) - f = cdms2.open(lf) - atts = f.listglobal() - outfd = outfile - - # CONTROL OF OUTPUT DIRECTORY AND FILE - - # outdir AND outfilename PROVIDED BY USER - if outdir is not None and outfilename is not None: - outfd = outdir + outfilename + # d = xcdat.open_dataset(infile, data_var=var) + d = xcdat_open(infile, data_var=var) + atts = d.attrs - # outdir PROVIDED BY USER, BUT filename 
IS TAKEN FROM infilename WITH CLIM MODIFICATIONS SUFFIX ADDED BELOW - if outdir is not None and outfilename is None: - outfd = outdir + "/" + infilename + print('type(d):', type(d)) + print('atts:', atts) - if outdir is None and outfilename is None: - outfd = outfile + # CONTROL OF OUTPUT DIRECTORY AND FILE + out = outfile + if outpath is None: + outdir = os.path.dirname(outfile) + else: + outdir = outpath + os.makedirs(outdir, exist_ok=True) - print("outfd is ", outfd) print("outdir is ", outdir) - seperate_clims = "y" + c = d.time # coordinate for time - # DEFAULT CLIM - BASED ON ENTIRE TIME SERIES + # CLIM PERIOD if (start is None) and (end is None): - d = f(var) - t = d.getTime() - c = t.asComponentTime() - start_yr_str = str(c[0].year) - start_mo_str = str(c[0].month) - end_yr_str = str(c[len(c) - 1].year) - end_mo_str = str(c[len(c) - 1].month) + # DEFAULT CLIM - BASED ON ENTIRE TIME SERIES + start_yr_str = str(int(c["time.year"][0])) + start_mo_str = str(int(c["time.month"][0]).zfill(2)) + end_yr_str = str(int(c["time.year"][len(c) - 1])) + end_mo_str = str(int(c["time.month"][len(c) - 1]).zfill(2)) start_yr = int(start_yr_str) start_mo = int(start_mo_str) end_yr = int(end_yr_str) end_mo = int(end_mo_str) - - # USER DEFINED PERIOD + print(start_yr_str, start_mo_str, end_yr_str, end_mo_str) else: - start_mo = int(start.split("-")[1]) + # USER DEFINED PERIOD start_yr = int(start.split("-")[0]) - end_mo = int(end.split("-")[1]) + start_mo = int(start.split("-")[1]) end_yr = int(end.split("-")[0]) + end_mo = int(end.split("-")[1]) start_yr_str = str(start_yr) - start_mo_str = str(start_mo) + start_mo_str = str(start_mo).zfill(2) end_yr_str = str(end_yr) - end_mo_str = str(end_mo) + end_mo_str = str(end_mo).zfill(2) - d = f( - var, time=(cdtime.comptime(start_yr, start_mo), cdtime.comptime(end_yr, end_mo)) - ) + d = d.sel(time=slice(start_yr_str + '-' + start_mo_str + '-01', + end_yr_str + '-' + end_mo_str + '-31')) print("start_yr_str is ", start_yr_str) + print("start_mo_str is ", start_mo_str) + print("end_yr_str is ", end_yr_str) + print("end_mo_str is ", end_mo_str) - if start_mo_str not in ["11", "12"]: - start_mo_str = "0" + start_mo_str - if end_mo_str not in ["11", "12"]: - end_mo_str = "0" + end_mo_str - - d_ac = cdutil.ANNUALCYCLE.climatology(d).astype("float32") - d_djf = cdutil.DJF.climatology(d)(squeeze=1).astype("float32") - d_jja = cdutil.JJA.climatology(d)(squeeze=1).astype("float32") - d_son = cdutil.SON.climatology(d)(squeeze=1).astype("float32") - d_mam = cdutil.MAM.climatology(d)(squeeze=1).astype("float32") + # Calculate climatology + dask.config.set(**{'array.slicing.split_large_chunks': True}) + d_clim = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},) + d_ac = d.temporal.climatology(var, freq="month", weighted=True) - for v in [d_ac, d_djf, d_jja, d_son, d_mam]: + d_clim_dict = dict() - v.id = var + d_clim_dict['DJF'] = d_clim.isel(time=0) + d_clim_dict['MAM'] = d_clim.isel(time=1) + d_clim_dict['JJA'] = d_clim.isel(time=2) + d_clim_dict['SON'] = d_clim.isel(time=3) + d_clim_dict['AC'] = d_ac for s in ["AC", "DJF", "MAM", "JJA", "SON"]: - addf = ( "." 
+ start_yr_str @@ -113,99 +95,73 @@ def clim_calc(var, infile, outfile, outdir, outfilename, start, end): + ver + ".nc" ) + if outfilename is not None: + out = os.path.join(outdir, outfilename) + out_season = out.replace(".nc", addf) + + print("output file is", out_season) + d_clim_dict[s].to_netcdf(out_season) - if seperate_clims == "y": - print("outfd is ", outfd) - out = outfd - out = out.replace(".nc", addf) - out = out.replace(".xml", addf) - print("out is ", out) - - if seperate_clims == "n": - out = outfd.replace("climo.nc", s + ".nc") - if s == "AC": - do = d_ac - if s == "DJF": - do = d_djf - if s == "MAM": - do = d_mam - if s == "JJA": - do = d_jja - if s == "SON": - do = d_son - do.id = var - - # MKDIRS AS NEEDED - lst = outfd.split("/") - s = "/" - for ll in range(len(lst)): - d = s.join(lst[0 : ll + 1]) - try: - os.mkdir(d) - except OSError: - pass - - g = cdms2.open(out, "w+") - g.write(do) - - for att in atts: - setattr(g, att, f.getglobal(att)) - g.close() - print(do.shape, " ", d_ac.shape, " ", out) - f.close() - return - - -####################################################################### - - -P = pcmdi_metrics.driver.pmp_parser.PMPMetricsParser() - - -P.add_argument( - "--vars", dest="vars", help="List of variables", nargs="+", required=False) -P.add_argument("--infile", dest="infile", help="Defines infile", required=False) -P.add_argument( "--outfile", dest="outfile", help="Defines output path and filename", required=False) -P.add_argument("--outpath", dest="outpath", help="Defines outpath only", required=False) -P.add_argument( - "--outfilename", - dest="outfilename", - help="Defines out filename only", - required=False, -) -P.add_argument( - "--start", dest="start", help="Defines start year and month", required=False) - -P.add_argument("--end", dest="end", help="Defines end year and month", required=False) - -args = P.get_parameter() - -infile_template = args.infile -outfile_template = args.outfile -outpath_template = args.outpath -outfilename_template = args.outfilename -varlist = args.vars -start = args.start -end = args.end - -print("start and end are ", start, " ", end) -print("variable list: ", varlist) - -InFile = StringConstructor(infile_template) -OutFile = StringConstructor(outfile_template) -OutFileName = StringConstructor(outfilename_template) -OutPath = StringConstructor(outpath_template) - -for var in varlist: - # Build filenames - InFile.variable = var - OutFile.variable = var - OutFileName.variable = var - OutPath.variable = var - infile = InFile() - outfile = OutFile() - outfilename = OutFileName() - outpath = OutPath() - - # calculate climatologies for this variable - clim_calc(var, infile, outfile, outpath, outfilename, start, end) + +if __name__ == "__main__": + + ver = datetime.datetime.now().strftime("v%Y%m%d") + + P = pcmdi_metrics.driver.pmp_parser.PMPMetricsParser() + + P.add_argument( + "--vars", dest="vars", help="List of variables", nargs="+", required=False + ) + P.add_argument("--infile", dest="infile", help="Defines infile", required=False) + P.add_argument( + "--outfile", dest="outfile", help="Defines output path and filename", required=False + ) + P.add_argument("--outpath", dest="outpath", help="Defines outpath only", required=False) + P.add_argument( + "--outfilename", + dest="outfilename", + help="Defines out filename only", + required=False, + ) + P.add_argument( + "--start", dest="start", help="Defines start year and month", required=False + ) + P.add_argument("--end", dest="end", help="Defines end year and month", 
required=False) + + args = P.get_parameter() + + infile_template = args.infile + outfile_template = args.outfile + outpath_template = args.outpath + outfilename_template = args.outfilename + varlist = args.vars + start = args.start + end = args.end + + print("start and end are ", start, " ", end) + print("variable list: ", varlist) + + InFile = StringConstructor(infile_template) + OutFile = StringConstructor(outfile_template) + OutFileName = StringConstructor(outfilename_template) + OutPath = StringConstructor(outpath_template) + + for var in varlist: + # Build filenames + InFile.variable = var + OutFile.variable = var + OutFileName.variable = var + OutPath.variable = var + infile = InFile() + outfile = OutFile() + outfilename = OutFileName() + outpath = OutPath() + + print('var:', var) + print('infile:', infile) + print('outfile:', outfile) + print('outfilename:', outfilename) + print('outpath:', outpath) + + # calculate climatologies for this variable + clim_calc(var, infile, outfile, outpath, outfilename, start, end) diff --git a/setup.py b/setup.py index ee107d35d..e323ed341 100755 --- a/setup.py +++ b/setup.py @@ -66,7 +66,6 @@ scripts = [ "pcmdi_metrics/pcmdi/scripts/mean_climate_driver.py", "pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py", - "pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-xcdat.py", "pcmdi_metrics/misc/scripts/parallelize_driver.py", "pcmdi_metrics/misc/scripts/get_pmp_data.py", "pcmdi_metrics/monsoon_wang/scripts/mpindex_compute.py", From 6e07563823bfbba8dccad59ac0a03735d5fd91bd Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Thu, 20 Oct 2022 22:50:13 -0700 Subject: [PATCH 014/130] clean up --- pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py index 9cd3c9ea0..ad81bd024 100644 --- a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py +++ b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py @@ -14,12 +14,11 @@ def clim_calc(var, infile, outfile=None, outpath=None, outfilename=None, start=N ver = datetime.datetime.now().strftime("v%Y%m%d") print('time is ', ver) - tmp = infile.split("/") - infilename = tmp[len(tmp) - 1] + infilename = infile.split("/")[-1] print("infilename is ", infilename) - # d = xcdat.open_dataset(infile, data_var=var) - d = xcdat_open(infile, data_var=var) + # d = xcdat.open_dataset(infile, data_var=var) # use xcdat function directly + d = xcdat_open(infile, data_var=var) # wrapper of xcdat open functions to enable using xml atts = d.attrs print('type(d):', type(d)) From 3151092493d20ca641036667af9297d689ea0e69 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 20 Oct 2022 22:52:33 -0700 Subject: [PATCH 015/130] add description as comment --- pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py index ad81bd024..d718bfc43 100644 --- a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py +++ b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py @@ -99,7 +99,7 @@ def clim_calc(var, infile, outfile=None, outpath=None, outfilename=None, start=N out_season = out.replace(".nc", addf) print("output file is", out_season) - d_clim_dict[s].to_netcdf(out_season) + d_clim_dict[s].to_netcdf(out_season) # global attributes are 
automatically saved as well if __name__ == "__main__": From f07d8a9d8e621fbc6f8284c1e5f7a329c4820b17 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 20 Oct 2022 23:05:07 -0700 Subject: [PATCH 016/130] clean up: simplify repeating lines --- .../scripts/pcmdi_compute_climatologies.py | 23 ++++++++----------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py index d718bfc43..148ed962d 100644 --- a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py +++ b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py @@ -39,26 +39,23 @@ def clim_calc(var, infile, outfile=None, outpath=None, outfilename=None, start=N # CLIM PERIOD if (start is None) and (end is None): # DEFAULT CLIM - BASED ON ENTIRE TIME SERIES - start_yr_str = str(int(c["time.year"][0])) - start_mo_str = str(int(c["time.month"][0]).zfill(2)) - end_yr_str = str(int(c["time.year"][len(c) - 1])) - end_mo_str = str(int(c["time.month"][len(c) - 1]).zfill(2)) - start_yr = int(start_yr_str) - start_mo = int(start_mo_str) - end_yr = int(end_yr_str) - end_mo = int(end_mo_str) - print(start_yr_str, start_mo_str, end_yr_str, end_mo_str) + start_yr = int(d.time["time.year"][0]) + start_mo = int(d.time["time.month"][0]) + end_yr = int(d.time["time.year"][-1]) + end_mo = int(d.time["time.month"][-1]) else: # USER DEFINED PERIOD start_yr = int(start.split("-")[0]) start_mo = int(start.split("-")[1]) end_yr = int(end.split("-")[0]) end_mo = int(end.split("-")[1]) - start_yr_str = str(start_yr) - start_mo_str = str(start_mo).zfill(2) - end_yr_str = str(end_yr) - end_mo_str = str(end_mo).zfill(2) + start_yr_str = str(start_yr) + start_mo_str = str(start_mo).zfill(2) + end_yr_str = str(end_yr) + end_mo_str = str(end_mo).zfill(2) + + # Subset given time period d = d.sel(time=slice(start_yr_str + '-' + start_mo_str + '-01', end_yr_str + '-' + end_mo_str + '-31')) From acc86e23aa5caf8bf38e3a170b8eef3785edf794 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Tue, 25 Oct 2022 12:01:22 -0700 Subject: [PATCH 017/130] Exclude yaml and script as default from pmp output json --- pcmdi_metrics/io/base.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/pcmdi_metrics/io/base.py b/pcmdi_metrics/io/base.py index f44a674d7..feb4cecee 100755 --- a/pcmdi_metrics/io/base.py +++ b/pcmdi_metrics/io/base.py @@ -158,7 +158,16 @@ def __call__(self): def read(self): pass - def write(self, data, type="json", mode="w", *args, **kwargs): + def write( + self, + data, + type="json", + mode="w", + include_YAML=False, + include_script=False, + *args, + **kwargs, + ): self.type = type.lower() file_name = self() dir_path = os.path.split(file_name)[0] @@ -197,9 +206,13 @@ def write(self, data, type="json", mode="w", *args, **kwargs): f = open(file_name, "w") update_dict(out_dict, data) if "yaml" in out_dict["provenance"]["conda"]: - out_dict["YAML"] = out_dict["provenance"]["conda"]["yaml"] + if include_YAML: + out_dict["YAML"] = out_dict["provenance"]["conda"]["yaml"] del out_dict["provenance"]["conda"]["yaml"] - # out_dict = OrderedDict({"provenance": generateProvenance()}) + + if not include_script: + if "script" in out_dict["provenance"].keys(): + del out_dict["provenance"]["script"] json.dump(out_dict, f, cls=CDMSDomainsEncoder, *args, **kwargs) f.close() From 0da306aeb2094f4b32f1708a50c2a2cc39f96d35 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 26 Oct 2022 17:17:40 -0700 Subject: [PATCH 018/130] 
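Patch 016 above reads the default climatology window off the time axis and zero-pads the month strings before slicing. A small sketch of that logic in isolation; the input file name is a placeholder, and ds.time.dt.year is used here as the plain-xarray equivalent of the d.time["time.year"] access in the patch.

import xcdat

# Placeholder input; any dataset with a decoded monthly time axis will do.
ds = xcdat.open_dataset("ts_mon.nc", data_var="ts")

start, end = None, None  # or user-defined strings such as "1981-01", "2005-12"

if start is None and end is None:
    # Default window: first and last time step of the series.
    start_yr, start_mo = int(ds.time.dt.year[0]), int(ds.time.dt.month[0])
    end_yr, end_mo = int(ds.time.dt.year[-1]), int(ds.time.dt.month[-1])
else:
    start_yr, start_mo = (int(v) for v in start.split("-"))
    end_yr, end_mo = (int(v) for v in end.split("-"))

# Zero-pad the month on the string (str.zfill), then subset the period.
start_label = str(start_yr) + "-" + str(start_mo).zfill(2) + "-01"
end_label = str(end_yr) + "-" + str(end_mo).zfill(2) + "-31"
ds = ds.sel(time=slice(start_label, end_label))
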
re-org --- pcmdi_metrics/driver/outputmetrics.py | 3 +++ pcmdi_metrics/pcmdi/{ => lib}/__init__.py | 0 pcmdi_metrics/pcmdi/{ => lib}/annual_mean.py | 0 pcmdi_metrics/pcmdi/{ => lib}/bias_xy.py | 0 pcmdi_metrics/pcmdi/{ => lib}/bias_xyt.py | 0 pcmdi_metrics/pcmdi/{ => lib}/cor_xy.py | 0 pcmdi_metrics/pcmdi/{ => lib}/cor_xyt.py | 0 pcmdi_metrics/{driver => pcmdi/lib}/dataset.py | 0 pcmdi_metrics/pcmdi/{ => lib}/io.py | 0 .../pcmdi/{ => lib}/mean_climate_metrics_calculations.py | 0 .../pcmdi/{ => lib}/mean_climate_metrics_driver.py | 9 +++++++++ pcmdi_metrics/pcmdi/{ => lib}/mean_xy.py | 0 pcmdi_metrics/pcmdi/{ => lib}/meanabs_xy.py | 0 pcmdi_metrics/pcmdi/{ => lib}/meanabs_xyt.py | 0 pcmdi_metrics/{driver => pcmdi/lib}/model.py | 0 pcmdi_metrics/pcmdi/{ => lib}/pmp_parser.py | 0 pcmdi_metrics/pcmdi/{ => lib}/rms_0.py | 0 pcmdi_metrics/pcmdi/{ => lib}/rms_xy.py | 0 pcmdi_metrics/pcmdi/{ => lib}/rms_xyt.py | 0 pcmdi_metrics/pcmdi/{ => lib}/rmsc_xy.py | 0 pcmdi_metrics/pcmdi/{ => lib}/seasonal_mean.py | 0 pcmdi_metrics/pcmdi/{ => lib}/std_xy.py | 0 pcmdi_metrics/pcmdi/{ => lib}/std_xyt.py | 0 pcmdi_metrics/pcmdi/{ => lib}/zonal_mean.py | 0 pcmdi_metrics/version.py | 6 +++--- share/DefArgsCIA.json | 2 +- 26 files changed, 16 insertions(+), 4 deletions(-) rename pcmdi_metrics/pcmdi/{ => lib}/__init__.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/annual_mean.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/bias_xy.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/bias_xyt.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/cor_xy.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/cor_xyt.py (100%) rename pcmdi_metrics/{driver => pcmdi/lib}/dataset.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/io.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/mean_climate_metrics_calculations.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/mean_climate_metrics_driver.py (96%) rename pcmdi_metrics/pcmdi/{ => lib}/mean_xy.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/meanabs_xy.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/meanabs_xyt.py (100%) rename pcmdi_metrics/{driver => pcmdi/lib}/model.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/pmp_parser.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/rms_0.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/rms_xy.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/rms_xyt.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/rmsc_xy.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/seasonal_mean.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/std_xy.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/std_xyt.py (100%) rename pcmdi_metrics/pcmdi/{ => lib}/zonal_mean.py (100%) diff --git a/pcmdi_metrics/driver/outputmetrics.py b/pcmdi_metrics/driver/outputmetrics.py index e69131ac0..1518dc610 100644 --- a/pcmdi_metrics/driver/outputmetrics.py +++ b/pcmdi_metrics/driver/outputmetrics.py @@ -139,7 +139,10 @@ def calculate_and_output_metrics(self, ref, test): raise RuntimeError("Need to skip model: %s" % test.obs_or_model) # Todo: Make this a fcn + print('jwlee-test-2-1, test().shape:', test().shape) + print('jwlee-test-2-2, test_data.shape:', test_data.shape) self.set_grid_in_metrics_dictionary(test_data) + print('jwlee-test-2-3, test_data.shape:', test_data.shape) if ref_data.shape != test_data.shape: raise RuntimeError( diff --git a/pcmdi_metrics/pcmdi/__init__.py b/pcmdi_metrics/pcmdi/lib/__init__.py similarity index 100% rename from pcmdi_metrics/pcmdi/__init__.py rename to pcmdi_metrics/pcmdi/lib/__init__.py diff --git a/pcmdi_metrics/pcmdi/annual_mean.py b/pcmdi_metrics/pcmdi/lib/annual_mean.py similarity 
index 100% rename from pcmdi_metrics/pcmdi/annual_mean.py rename to pcmdi_metrics/pcmdi/lib/annual_mean.py diff --git a/pcmdi_metrics/pcmdi/bias_xy.py b/pcmdi_metrics/pcmdi/lib/bias_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi/bias_xy.py rename to pcmdi_metrics/pcmdi/lib/bias_xy.py diff --git a/pcmdi_metrics/pcmdi/bias_xyt.py b/pcmdi_metrics/pcmdi/lib/bias_xyt.py similarity index 100% rename from pcmdi_metrics/pcmdi/bias_xyt.py rename to pcmdi_metrics/pcmdi/lib/bias_xyt.py diff --git a/pcmdi_metrics/pcmdi/cor_xy.py b/pcmdi_metrics/pcmdi/lib/cor_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi/cor_xy.py rename to pcmdi_metrics/pcmdi/lib/cor_xy.py diff --git a/pcmdi_metrics/pcmdi/cor_xyt.py b/pcmdi_metrics/pcmdi/lib/cor_xyt.py similarity index 100% rename from pcmdi_metrics/pcmdi/cor_xyt.py rename to pcmdi_metrics/pcmdi/lib/cor_xyt.py diff --git a/pcmdi_metrics/driver/dataset.py b/pcmdi_metrics/pcmdi/lib/dataset.py similarity index 100% rename from pcmdi_metrics/driver/dataset.py rename to pcmdi_metrics/pcmdi/lib/dataset.py diff --git a/pcmdi_metrics/pcmdi/io.py b/pcmdi_metrics/pcmdi/lib/io.py similarity index 100% rename from pcmdi_metrics/pcmdi/io.py rename to pcmdi_metrics/pcmdi/lib/io.py diff --git a/pcmdi_metrics/pcmdi/mean_climate_metrics_calculations.py b/pcmdi_metrics/pcmdi/lib/mean_climate_metrics_calculations.py similarity index 100% rename from pcmdi_metrics/pcmdi/mean_climate_metrics_calculations.py rename to pcmdi_metrics/pcmdi/lib/mean_climate_metrics_calculations.py diff --git a/pcmdi_metrics/pcmdi/mean_climate_metrics_driver.py b/pcmdi_metrics/pcmdi/lib/mean_climate_metrics_driver.py similarity index 96% rename from pcmdi_metrics/pcmdi/mean_climate_metrics_driver.py rename to pcmdi_metrics/pcmdi/lib/mean_climate_metrics_driver.py index 08dc47388..82f6a187a 100644 --- a/pcmdi_metrics/pcmdi/mean_climate_metrics_driver.py +++ b/pcmdi_metrics/pcmdi/lib/mean_climate_metrics_driver.py @@ -155,6 +155,8 @@ def run_reference_and_test_comparison(self): reference_data_set = self.parameter.reference_data_set test_data_set = self.parameter.test_data_set + print('jwlee-test-0, test_data_set:', test_data_set) + reference_data_set_is_obs = self.is_data_set_obs(reference_data_set) test_data_set_is_obs = self.is_data_set_obs(test_data_set) @@ -169,6 +171,9 @@ def run_reference_and_test_comparison(self): test_data_set, self.obs_dict, self.var ) + print('jwlee-test-1, test_data_set:', test_data_set) + print('jwlee-test-1, test_data_set_is_obs:', test_data_set_is_obs) + if len(reference_data_set) == 0: # We did not find any ref!!! 
raise RuntimeError("No reference dataset found!") @@ -192,6 +197,7 @@ def run_reference_and_test_comparison(self): ) self.output_metric.add_region(self.region) try: + print('jwlee-test-1.5, test_data_set_is_obs, test, self.parameter.test_data_path:', test_data_set_is_obs, test, self.parameter.test_data_path) tst = self.determine_obs_or_model( test_data_set_is_obs, test, self.parameter.test_data_path ) @@ -207,6 +213,8 @@ def run_reference_and_test_comparison(self): break try: + print('jwlee-test-2: type(self), ref, tst:', type(self), ref, tst) + print('jwlee-test-2: tst().shape:', tst().shape) self.output_metric.calculate_and_output_metrics(ref, tst) except RuntimeError: continue @@ -233,6 +241,7 @@ def is_data_set_obs(self, data_set): return data_set_is_obs def determine_obs_or_model(self, is_obs, ref_or_test, data_path): + print('jwlee-test-1.5-1: is_obs, ref_or_test, data_path:', is_obs, ref_or_test, data_path) """Actually create Observation or Module object based on if ref_or_test is an obs or model.""" if is_obs: diff --git a/pcmdi_metrics/pcmdi/mean_xy.py b/pcmdi_metrics/pcmdi/lib/mean_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi/mean_xy.py rename to pcmdi_metrics/pcmdi/lib/mean_xy.py diff --git a/pcmdi_metrics/pcmdi/meanabs_xy.py b/pcmdi_metrics/pcmdi/lib/meanabs_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi/meanabs_xy.py rename to pcmdi_metrics/pcmdi/lib/meanabs_xy.py diff --git a/pcmdi_metrics/pcmdi/meanabs_xyt.py b/pcmdi_metrics/pcmdi/lib/meanabs_xyt.py similarity index 100% rename from pcmdi_metrics/pcmdi/meanabs_xyt.py rename to pcmdi_metrics/pcmdi/lib/meanabs_xyt.py diff --git a/pcmdi_metrics/driver/model.py b/pcmdi_metrics/pcmdi/lib/model.py similarity index 100% rename from pcmdi_metrics/driver/model.py rename to pcmdi_metrics/pcmdi/lib/model.py diff --git a/pcmdi_metrics/pcmdi/pmp_parser.py b/pcmdi_metrics/pcmdi/lib/pmp_parser.py similarity index 100% rename from pcmdi_metrics/pcmdi/pmp_parser.py rename to pcmdi_metrics/pcmdi/lib/pmp_parser.py diff --git a/pcmdi_metrics/pcmdi/rms_0.py b/pcmdi_metrics/pcmdi/lib/rms_0.py similarity index 100% rename from pcmdi_metrics/pcmdi/rms_0.py rename to pcmdi_metrics/pcmdi/lib/rms_0.py diff --git a/pcmdi_metrics/pcmdi/rms_xy.py b/pcmdi_metrics/pcmdi/lib/rms_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi/rms_xy.py rename to pcmdi_metrics/pcmdi/lib/rms_xy.py diff --git a/pcmdi_metrics/pcmdi/rms_xyt.py b/pcmdi_metrics/pcmdi/lib/rms_xyt.py similarity index 100% rename from pcmdi_metrics/pcmdi/rms_xyt.py rename to pcmdi_metrics/pcmdi/lib/rms_xyt.py diff --git a/pcmdi_metrics/pcmdi/rmsc_xy.py b/pcmdi_metrics/pcmdi/lib/rmsc_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi/rmsc_xy.py rename to pcmdi_metrics/pcmdi/lib/rmsc_xy.py diff --git a/pcmdi_metrics/pcmdi/seasonal_mean.py b/pcmdi_metrics/pcmdi/lib/seasonal_mean.py similarity index 100% rename from pcmdi_metrics/pcmdi/seasonal_mean.py rename to pcmdi_metrics/pcmdi/lib/seasonal_mean.py diff --git a/pcmdi_metrics/pcmdi/std_xy.py b/pcmdi_metrics/pcmdi/lib/std_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi/std_xy.py rename to pcmdi_metrics/pcmdi/lib/std_xy.py diff --git a/pcmdi_metrics/pcmdi/std_xyt.py b/pcmdi_metrics/pcmdi/lib/std_xyt.py similarity index 100% rename from pcmdi_metrics/pcmdi/std_xyt.py rename to pcmdi_metrics/pcmdi/lib/std_xyt.py diff --git a/pcmdi_metrics/pcmdi/zonal_mean.py b/pcmdi_metrics/pcmdi/lib/zonal_mean.py similarity index 100% rename from pcmdi_metrics/pcmdi/zonal_mean.py rename to 
pcmdi_metrics/pcmdi/lib/zonal_mean.py diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 2cc5782b2..fe3d9624b 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ -__version__ = "v1.2.1" -__git_tag_describe__ = "v1.2.1-612-g6456beb" -__git_sha1__ = "6456beb877f95286aaf2609eb5c07d064cba83f9" +__version__ = 'v2.3.1' +__git_tag_describe__ = 'v2.3.1-26-gacc86e2' +__git_sha1__ = 'acc86e23aa5caf8bf38e3a170b8eef3785edf794' diff --git a/share/DefArgsCIA.json b/share/DefArgsCIA.json index 8507f33ba..cd33a055d 100644 --- a/share/DefArgsCIA.json +++ b/share/DefArgsCIA.json @@ -163,4 +163,4 @@ ], "help":"A list of variables to be processed" } -} +} \ No newline at end of file From f85e36d0da8066740567ff9374abb2811dabcde8 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 26 Oct 2022 17:20:13 -0700 Subject: [PATCH 019/130] add example param files --- .../pcmdi/param/basic_annual_cycle_param.py | 15 ++++ pcmdi_metrics/pcmdi/param/basic_param.py | 68 +++++++++++++++++++ 2 files changed, 83 insertions(+) create mode 100644 pcmdi_metrics/pcmdi/param/basic_annual_cycle_param.py create mode 100644 pcmdi_metrics/pcmdi/param/basic_param.py diff --git a/pcmdi_metrics/pcmdi/param/basic_annual_cycle_param.py b/pcmdi_metrics/pcmdi/param/basic_annual_cycle_param.py new file mode 100644 index 000000000..0222863e7 --- /dev/null +++ b/pcmdi_metrics/pcmdi/param/basic_annual_cycle_param.py @@ -0,0 +1,15 @@ +# VARIABLES TO USE +vars = ['pr'] +#vars = ['ua', 'ta'] +vars = ['pr', 'ua', 'ta'] + +# START AND END DATES FOR CLIMATOLOGY +start = '1981-01' +#end = '1983-12' +end = '2005-12' + +# INPUT DATASET - CAN BE MODEL OR OBSERVATIONS +infile = '/work/lee1043/ESGF/E3SMv2/atmos/mon/cmip6.E3SMv2.historical.r1i1p1f1.mon.%(variable).xml' + +# DIRECTORY WHERE TO PUT RESULTS +outfile = 'clim/cmip6.historical.E3SMv2.r1i1p1.mon.%(variable).nc' diff --git a/pcmdi_metrics/pcmdi/param/basic_param.py b/pcmdi_metrics/pcmdi/param/basic_param.py new file mode 100644 index 000000000..8f8db45f1 --- /dev/null +++ b/pcmdi_metrics/pcmdi/param/basic_param.py @@ -0,0 +1,68 @@ +import os + +# +# OPTIONS ARE SET BY USER IN THIS FILE AS INDICATED BELOW BY: +# +# + +# RUN IDENTIFICATION +# DEFINES A SUBDIRECTORY TO METRICS OUTPUT RESULTS SO MULTIPLE CASES CAN +# BE COMPARED +case_id = 'v20221025' + +# LIST OF MODEL VERSIONS TO BE TESTED - WHICH ARE EXPECTED TO BE PART OF +# CLIMATOLOGY FILENAME +test_data_set = ['E3SMv2'] + + +# VARIABLES TO USE +#vars = ['pr', 'ua_850'] +vars = ['pr'] + + +# Observations to use at the moment "default" or "alternate" +#reference_data_set = ['all'] +reference_data_set = ['default'] +#ext = '.nc' + +# INTERPOLATION OPTIONS +target_grid = '2.5x2.5' # OPTIONS: '2.5x2.5' or an actual cdms2 grid object +regrid_tool = 'regrid2' # 'regrid2' # OPTIONS: 'regrid2','esmf' +# OPTIONS: 'linear','conservative', only if tool is esmf +regrid_method = 'linear' +regrid_tool_ocn = 'esmf' # OPTIONS: "regrid2","esmf" +# OPTIONS: 'linear','conservative', only if tool is esmf +regrid_method_ocn = 'linear' + +# SAVE INTERPOLATED MODEL CLIMATOLOGIES ? 
+save_test_clims = True # True or False + +## DIRECTORY WHERE TO PUT INTERPOLATED MODELS' CLIMATOLOGIES +test_clims_interpolated_output = './interpolated_model_clims' + + +# Templates for climatology files +# %(param) will subsitute param with values in this file +filename_template = "cmip6.historical.E3SMv2.r1i1p1.mon.%(variable).198101-200512.AC.v20221020.nc" + +# filename template for landsea masks ('sftlf') +sftlf_filename_template = "sftlf_fx_E3SM-1-0_historical_r1i1p1f1_gr.nc" + +generate_sftlf = False # if land surface type mask cannot be found, generate one + +# Region +regions = {"pr": ["global"], + "ua_850": ["global"] + } + +# ROOT PATH FOR MODELS CLIMATOLOGIES +#test_data_path = '/work/lee1043/ESGF/E3SMv2/atmos/mon' +test_data_path = './clim' +# ROOT PATH FOR OBSERVATIONS +# Note that atm/mo/%(variable)/ac will be added to this +reference_data_path = '/p/user_pub/PCMDIobs/obs4MIPs_clims' + +# DIRECTORY WHERE TO PUT RESULTS +metrics_output_path = os.path.join( + 'output', + "%(case_id)") From 8ac56ad2fca88a0ee5e45e632a7dbe4045303d66 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 26 Oct 2022 17:26:44 -0700 Subject: [PATCH 020/130] re-org files --- pcmdi_metrics/{driver => pcmdi/lib_driver}/__init__.py | 0 pcmdi_metrics/pcmdi/{lib => lib_driver}/dataset.py | 0 pcmdi_metrics/pcmdi/{lib => lib_driver}/model.py | 0 pcmdi_metrics/{driver => pcmdi/lib_driver}/observation.py | 0 pcmdi_metrics/{driver => pcmdi/lib_driver}/outputmetrics.py | 0 pcmdi_metrics/{driver => pcmdi/lib_driver}/pmp_parameter.py | 0 pcmdi_metrics/{driver => pcmdi/lib_driver}/pmp_parser.py | 0 pcmdi_metrics/pcmdi/{scripts => }/mean_climate_driver.py | 0 pcmdi_metrics/pcmdi/{scripts => }/pcmdi_compute_climatologies.py | 0 9 files changed, 0 insertions(+), 0 deletions(-) rename pcmdi_metrics/{driver => pcmdi/lib_driver}/__init__.py (100%) rename pcmdi_metrics/pcmdi/{lib => lib_driver}/dataset.py (100%) rename pcmdi_metrics/pcmdi/{lib => lib_driver}/model.py (100%) rename pcmdi_metrics/{driver => pcmdi/lib_driver}/observation.py (100%) rename pcmdi_metrics/{driver => pcmdi/lib_driver}/outputmetrics.py (100%) rename pcmdi_metrics/{driver => pcmdi/lib_driver}/pmp_parameter.py (100%) rename pcmdi_metrics/{driver => pcmdi/lib_driver}/pmp_parser.py (100%) rename pcmdi_metrics/pcmdi/{scripts => }/mean_climate_driver.py (100%) rename pcmdi_metrics/pcmdi/{scripts => }/pcmdi_compute_climatologies.py (100%) diff --git a/pcmdi_metrics/driver/__init__.py b/pcmdi_metrics/pcmdi/lib_driver/__init__.py similarity index 100% rename from pcmdi_metrics/driver/__init__.py rename to pcmdi_metrics/pcmdi/lib_driver/__init__.py diff --git a/pcmdi_metrics/pcmdi/lib/dataset.py b/pcmdi_metrics/pcmdi/lib_driver/dataset.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/dataset.py rename to pcmdi_metrics/pcmdi/lib_driver/dataset.py diff --git a/pcmdi_metrics/pcmdi/lib/model.py b/pcmdi_metrics/pcmdi/lib_driver/model.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/model.py rename to pcmdi_metrics/pcmdi/lib_driver/model.py diff --git a/pcmdi_metrics/driver/observation.py b/pcmdi_metrics/pcmdi/lib_driver/observation.py similarity index 100% rename from pcmdi_metrics/driver/observation.py rename to pcmdi_metrics/pcmdi/lib_driver/observation.py diff --git a/pcmdi_metrics/driver/outputmetrics.py b/pcmdi_metrics/pcmdi/lib_driver/outputmetrics.py similarity index 100% rename from pcmdi_metrics/driver/outputmetrics.py rename to pcmdi_metrics/pcmdi/lib_driver/outputmetrics.py diff --git a/pcmdi_metrics/driver/pmp_parameter.py 
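The example parameter files in patch 019 drive per-variable file names through %(variable) placeholders, and the climatology script fills them with genutil's StringConstructor, as in its __main__ block earlier in this series. A short sketch of that substitution using the template strings from basic_annual_cycle_param.py; the loop is trimmed to the name-building step only.

from genutil import StringConstructor

# Templates taken from the example parameter file above.
infile_template = "/work/lee1043/ESGF/E3SMv2/atmos/mon/cmip6.E3SMv2.historical.r1i1p1f1.mon.%(variable).xml"
outfile_template = "clim/cmip6.historical.E3SMv2.r1i1p1.mon.%(variable).nc"

InFile = StringConstructor(infile_template)
OutFile = StringConstructor(outfile_template)

for var in ["pr", "ua", "ta"]:
    # Assign the keyword, then call the constructor to render the template.
    InFile.variable = var
    OutFile.variable = var
    print(var, InFile(), OutFile())
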
b/pcmdi_metrics/pcmdi/lib_driver/pmp_parameter.py similarity index 100% rename from pcmdi_metrics/driver/pmp_parameter.py rename to pcmdi_metrics/pcmdi/lib_driver/pmp_parameter.py diff --git a/pcmdi_metrics/driver/pmp_parser.py b/pcmdi_metrics/pcmdi/lib_driver/pmp_parser.py similarity index 100% rename from pcmdi_metrics/driver/pmp_parser.py rename to pcmdi_metrics/pcmdi/lib_driver/pmp_parser.py diff --git a/pcmdi_metrics/pcmdi/scripts/mean_climate_driver.py b/pcmdi_metrics/pcmdi/mean_climate_driver.py similarity index 100% rename from pcmdi_metrics/pcmdi/scripts/mean_climate_driver.py rename to pcmdi_metrics/pcmdi/mean_climate_driver.py diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py b/pcmdi_metrics/pcmdi/pcmdi_compute_climatologies.py similarity index 100% rename from pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py rename to pcmdi_metrics/pcmdi/pcmdi_compute_climatologies.py From 4e887f886ed1d9d665bd89eff08fb2d0ef1a966f Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 26 Oct 2022 17:35:13 -0700 Subject: [PATCH 021/130] re-org and clean up --- .../mjo/{scripts => }/mjo_metrics_driver.py | 0 .../__init__.py | 0 .../lib/__init__.py | 0 .../lib/annual_mean.py | 0 .../lib/bias_xy.py | 0 .../lib/bias_xyt.py | 0 .../lib/cor_xy.py | 0 .../lib/cor_xyt.py | 0 .../{pcmdi => pcmdi_mean_climate}/lib/io.py | 0 .../lib/mean_climate_metrics_calculations.py | 0 .../lib/mean_climate_metrics_driver.py | 0 .../lib/mean_xy.py | 0 .../lib/meanabs_xy.py | 0 .../lib/meanabs_xyt.py | 0 .../lib/pmp_parser.py | 0 .../lib/rms_0.py | 0 .../lib/rms_xy.py | 0 .../lib/rms_xyt.py | 0 .../lib/rmsc_xy.py | 0 .../lib/seasonal_mean.py | 0 .../lib/std_xy.py | 0 .../lib/std_xyt.py | 0 .../lib/zonal_mean.py | 0 .../pcmdi_mean_climate/lib_driver/__init__.py | 0 .../lib_driver/dataset.py | 0 .../lib_driver/model.py | 0 .../lib_driver/observation.py | 0 .../lib_driver/outputmetrics.py | 0 .../lib_driver/pmp_parameter.py | 0 .../lib_driver/pmp_parser.py | 0 .../mean_climate_driver.py | 0 .../param/basic_annual_cycle_param.py | 0 .../param/basic_param.py | 0 .../pcmdi_compute_climatologies.py | 0 .../scripts/build_obs_meta_dictionary.py | 0 .../scripts/make_obs_clim.py | 0 .../scripts/make_obs_clim.sh | 0 .../scripts/make_obs_sftlf.py | 0 .../scripts/obs_info_dictionary.json | 0 .../pcmdi_compute_climatologies-CMOR.py | 0 setup.py | 72 ++----------------- 41 files changed, 6 insertions(+), 66 deletions(-) rename pcmdi_metrics/mjo/{scripts => }/mjo_metrics_driver.py (100%) rename pcmdi_metrics/{pcmdi/lib_driver => pcmdi_mean_climate}/__init__.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/__init__.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/annual_mean.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/bias_xy.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/bias_xyt.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/cor_xy.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/cor_xyt.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/io.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/mean_climate_metrics_calculations.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/mean_climate_metrics_driver.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/mean_xy.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/meanabs_xy.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/meanabs_xyt.py (100%) rename pcmdi_metrics/{pcmdi => 
pcmdi_mean_climate}/lib/pmp_parser.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/rms_0.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/rms_xy.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/rms_xyt.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/rmsc_xy.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/seasonal_mean.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/std_xy.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/std_xyt.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib/zonal_mean.py (100%) create mode 100644 pcmdi_metrics/pcmdi_mean_climate/lib_driver/__init__.py rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib_driver/dataset.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib_driver/model.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib_driver/observation.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib_driver/outputmetrics.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib_driver/pmp_parameter.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/lib_driver/pmp_parser.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/mean_climate_driver.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/param/basic_annual_cycle_param.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/param/basic_param.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/pcmdi_compute_climatologies.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/scripts/build_obs_meta_dictionary.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/scripts/make_obs_clim.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/scripts/make_obs_clim.sh (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/scripts/make_obs_sftlf.py (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/scripts/obs_info_dictionary.json (100%) rename pcmdi_metrics/{pcmdi => pcmdi_mean_climate}/scripts/pcmdi_compute_climatologies-CMOR.py (100%) diff --git a/pcmdi_metrics/mjo/scripts/mjo_metrics_driver.py b/pcmdi_metrics/mjo/mjo_metrics_driver.py similarity index 100% rename from pcmdi_metrics/mjo/scripts/mjo_metrics_driver.py rename to pcmdi_metrics/mjo/mjo_metrics_driver.py diff --git a/pcmdi_metrics/pcmdi/lib_driver/__init__.py b/pcmdi_metrics/pcmdi_mean_climate/__init__.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib_driver/__init__.py rename to pcmdi_metrics/pcmdi_mean_climate/__init__.py diff --git a/pcmdi_metrics/pcmdi/lib/__init__.py b/pcmdi_metrics/pcmdi_mean_climate/lib/__init__.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/__init__.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/__init__.py diff --git a/pcmdi_metrics/pcmdi/lib/annual_mean.py b/pcmdi_metrics/pcmdi_mean_climate/lib/annual_mean.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/annual_mean.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/annual_mean.py diff --git a/pcmdi_metrics/pcmdi/lib/bias_xy.py b/pcmdi_metrics/pcmdi_mean_climate/lib/bias_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/bias_xy.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/bias_xy.py diff --git a/pcmdi_metrics/pcmdi/lib/bias_xyt.py b/pcmdi_metrics/pcmdi_mean_climate/lib/bias_xyt.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/bias_xyt.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/bias_xyt.py diff --git a/pcmdi_metrics/pcmdi/lib/cor_xy.py 
b/pcmdi_metrics/pcmdi_mean_climate/lib/cor_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/cor_xy.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/cor_xy.py diff --git a/pcmdi_metrics/pcmdi/lib/cor_xyt.py b/pcmdi_metrics/pcmdi_mean_climate/lib/cor_xyt.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/cor_xyt.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/cor_xyt.py diff --git a/pcmdi_metrics/pcmdi/lib/io.py b/pcmdi_metrics/pcmdi_mean_climate/lib/io.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/io.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/io.py diff --git a/pcmdi_metrics/pcmdi/lib/mean_climate_metrics_calculations.py b/pcmdi_metrics/pcmdi_mean_climate/lib/mean_climate_metrics_calculations.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/mean_climate_metrics_calculations.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/mean_climate_metrics_calculations.py diff --git a/pcmdi_metrics/pcmdi/lib/mean_climate_metrics_driver.py b/pcmdi_metrics/pcmdi_mean_climate/lib/mean_climate_metrics_driver.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/mean_climate_metrics_driver.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/mean_climate_metrics_driver.py diff --git a/pcmdi_metrics/pcmdi/lib/mean_xy.py b/pcmdi_metrics/pcmdi_mean_climate/lib/mean_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/mean_xy.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/mean_xy.py diff --git a/pcmdi_metrics/pcmdi/lib/meanabs_xy.py b/pcmdi_metrics/pcmdi_mean_climate/lib/meanabs_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/meanabs_xy.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/meanabs_xy.py diff --git a/pcmdi_metrics/pcmdi/lib/meanabs_xyt.py b/pcmdi_metrics/pcmdi_mean_climate/lib/meanabs_xyt.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/meanabs_xyt.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/meanabs_xyt.py diff --git a/pcmdi_metrics/pcmdi/lib/pmp_parser.py b/pcmdi_metrics/pcmdi_mean_climate/lib/pmp_parser.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/pmp_parser.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/pmp_parser.py diff --git a/pcmdi_metrics/pcmdi/lib/rms_0.py b/pcmdi_metrics/pcmdi_mean_climate/lib/rms_0.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/rms_0.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/rms_0.py diff --git a/pcmdi_metrics/pcmdi/lib/rms_xy.py b/pcmdi_metrics/pcmdi_mean_climate/lib/rms_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/rms_xy.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/rms_xy.py diff --git a/pcmdi_metrics/pcmdi/lib/rms_xyt.py b/pcmdi_metrics/pcmdi_mean_climate/lib/rms_xyt.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/rms_xyt.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/rms_xyt.py diff --git a/pcmdi_metrics/pcmdi/lib/rmsc_xy.py b/pcmdi_metrics/pcmdi_mean_climate/lib/rmsc_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/rmsc_xy.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/rmsc_xy.py diff --git a/pcmdi_metrics/pcmdi/lib/seasonal_mean.py b/pcmdi_metrics/pcmdi_mean_climate/lib/seasonal_mean.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/seasonal_mean.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/seasonal_mean.py diff --git a/pcmdi_metrics/pcmdi/lib/std_xy.py b/pcmdi_metrics/pcmdi_mean_climate/lib/std_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/std_xy.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/std_xy.py diff --git 
a/pcmdi_metrics/pcmdi/lib/std_xyt.py b/pcmdi_metrics/pcmdi_mean_climate/lib/std_xyt.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/std_xyt.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/std_xyt.py diff --git a/pcmdi_metrics/pcmdi/lib/zonal_mean.py b/pcmdi_metrics/pcmdi_mean_climate/lib/zonal_mean.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib/zonal_mean.py rename to pcmdi_metrics/pcmdi_mean_climate/lib/zonal_mean.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib_driver/__init__.py b/pcmdi_metrics/pcmdi_mean_climate/lib_driver/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pcmdi_metrics/pcmdi/lib_driver/dataset.py b/pcmdi_metrics/pcmdi_mean_climate/lib_driver/dataset.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib_driver/dataset.py rename to pcmdi_metrics/pcmdi_mean_climate/lib_driver/dataset.py diff --git a/pcmdi_metrics/pcmdi/lib_driver/model.py b/pcmdi_metrics/pcmdi_mean_climate/lib_driver/model.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib_driver/model.py rename to pcmdi_metrics/pcmdi_mean_climate/lib_driver/model.py diff --git a/pcmdi_metrics/pcmdi/lib_driver/observation.py b/pcmdi_metrics/pcmdi_mean_climate/lib_driver/observation.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib_driver/observation.py rename to pcmdi_metrics/pcmdi_mean_climate/lib_driver/observation.py diff --git a/pcmdi_metrics/pcmdi/lib_driver/outputmetrics.py b/pcmdi_metrics/pcmdi_mean_climate/lib_driver/outputmetrics.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib_driver/outputmetrics.py rename to pcmdi_metrics/pcmdi_mean_climate/lib_driver/outputmetrics.py diff --git a/pcmdi_metrics/pcmdi/lib_driver/pmp_parameter.py b/pcmdi_metrics/pcmdi_mean_climate/lib_driver/pmp_parameter.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib_driver/pmp_parameter.py rename to pcmdi_metrics/pcmdi_mean_climate/lib_driver/pmp_parameter.py diff --git a/pcmdi_metrics/pcmdi/lib_driver/pmp_parser.py b/pcmdi_metrics/pcmdi_mean_climate/lib_driver/pmp_parser.py similarity index 100% rename from pcmdi_metrics/pcmdi/lib_driver/pmp_parser.py rename to pcmdi_metrics/pcmdi_mean_climate/lib_driver/pmp_parser.py diff --git a/pcmdi_metrics/pcmdi/mean_climate_driver.py b/pcmdi_metrics/pcmdi_mean_climate/mean_climate_driver.py similarity index 100% rename from pcmdi_metrics/pcmdi/mean_climate_driver.py rename to pcmdi_metrics/pcmdi_mean_climate/mean_climate_driver.py diff --git a/pcmdi_metrics/pcmdi/param/basic_annual_cycle_param.py b/pcmdi_metrics/pcmdi_mean_climate/param/basic_annual_cycle_param.py similarity index 100% rename from pcmdi_metrics/pcmdi/param/basic_annual_cycle_param.py rename to pcmdi_metrics/pcmdi_mean_climate/param/basic_annual_cycle_param.py diff --git a/pcmdi_metrics/pcmdi/param/basic_param.py b/pcmdi_metrics/pcmdi_mean_climate/param/basic_param.py similarity index 100% rename from pcmdi_metrics/pcmdi/param/basic_param.py rename to pcmdi_metrics/pcmdi_mean_climate/param/basic_param.py diff --git a/pcmdi_metrics/pcmdi/pcmdi_compute_climatologies.py b/pcmdi_metrics/pcmdi_mean_climate/pcmdi_compute_climatologies.py similarity index 100% rename from pcmdi_metrics/pcmdi/pcmdi_compute_climatologies.py rename to pcmdi_metrics/pcmdi_mean_climate/pcmdi_compute_climatologies.py diff --git a/pcmdi_metrics/pcmdi/scripts/build_obs_meta_dictionary.py b/pcmdi_metrics/pcmdi_mean_climate/scripts/build_obs_meta_dictionary.py similarity index 100% rename from pcmdi_metrics/pcmdi/scripts/build_obs_meta_dictionary.py rename to 
pcmdi_metrics/pcmdi_mean_climate/scripts/build_obs_meta_dictionary.py diff --git a/pcmdi_metrics/pcmdi/scripts/make_obs_clim.py b/pcmdi_metrics/pcmdi_mean_climate/scripts/make_obs_clim.py similarity index 100% rename from pcmdi_metrics/pcmdi/scripts/make_obs_clim.py rename to pcmdi_metrics/pcmdi_mean_climate/scripts/make_obs_clim.py diff --git a/pcmdi_metrics/pcmdi/scripts/make_obs_clim.sh b/pcmdi_metrics/pcmdi_mean_climate/scripts/make_obs_clim.sh similarity index 100% rename from pcmdi_metrics/pcmdi/scripts/make_obs_clim.sh rename to pcmdi_metrics/pcmdi_mean_climate/scripts/make_obs_clim.sh diff --git a/pcmdi_metrics/pcmdi/scripts/make_obs_sftlf.py b/pcmdi_metrics/pcmdi_mean_climate/scripts/make_obs_sftlf.py similarity index 100% rename from pcmdi_metrics/pcmdi/scripts/make_obs_sftlf.py rename to pcmdi_metrics/pcmdi_mean_climate/scripts/make_obs_sftlf.py diff --git a/pcmdi_metrics/pcmdi/scripts/obs_info_dictionary.json b/pcmdi_metrics/pcmdi_mean_climate/scripts/obs_info_dictionary.json similarity index 100% rename from pcmdi_metrics/pcmdi/scripts/obs_info_dictionary.json rename to pcmdi_metrics/pcmdi_mean_climate/scripts/obs_info_dictionary.json diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-CMOR.py b/pcmdi_metrics/pcmdi_mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py similarity index 100% rename from pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-CMOR.py rename to pcmdi_metrics/pcmdi_mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py diff --git a/setup.py b/setup.py index e323ed341..5c1462dd6 100755 --- a/setup.py +++ b/setup.py @@ -7,13 +7,7 @@ from setuptools import find_packages, setup -if "--enable-devel" in sys.argv: - install_dev = True - sys.argv.remove("--enable-devel") -else: - install_dev = False - -Version = "2.0" +Version = "2.5" p = subprocess.Popen( ("git", "describe", "--tags"), stdin=subprocess.PIPE, @@ -48,32 +42,18 @@ p = subprocess.Popen(["python", "setup_default_args.py"], cwd="share") p.communicate() -portrait_files = [ - "pcmdi_metrics/graphics/share/portraits.scr", -] - -packages = { - "pcmdi_metrics": "src/python", - "pcmdi_metrics.io": "src/python/io", - "pcmdi_metrics.pcmdi": "src/python/pcmdi", - "pcmdi_metrics.diurnal": "src/python/diurnal", - "pcmdi_metrics.graphics": "src/python/graphics", - "pcmdi_metrics.driver": "src/python/pcmdi/scripts/driver", - "pcmdi_metrics.monsoon_wang": "src/python/monsoon_wang/lib", - "pcmdi_metrics.monsoon_sperber": "src/python/monsoon_sperber/lib", -} packages = find_packages() scripts = [ - "pcmdi_metrics/pcmdi/scripts/mean_climate_driver.py", - "pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py", - "pcmdi_metrics/misc/scripts/parallelize_driver.py", - "pcmdi_metrics/misc/scripts/get_pmp_data.py", + "pcmdi_metrics/pcmdi_mean_climate/pcmdi_compute_climatologies.py", + "pcmdi_metrics/pcmdi_mean_climate/mean_climate_driver.py", "pcmdi_metrics/monsoon_wang/scripts/mpindex_compute.py", "pcmdi_metrics/monsoon_sperber/scripts/driver_monsoon_sperber.py", - "pcmdi_metrics/mjo/scripts/mjo_metrics_driver.py", + "pcmdi_metrics/mjo/mjo_metrics_driver.py", "pcmdi_metrics/variability_mode/variability_modes_driver.py", "pcmdi_metrics/enso/enso_driver.py", "pcmdi_metrics/precip_variability/variability_across_timescales_PS_driver.py", + "pcmdi_metrics/misc/scripts/parallelize_driver.py", + "pcmdi_metrics/misc/scripts/get_pmp_data.py", ] # scripts += glob.glob("pcmdi_metrics/diurnal/scripts/*.py") @@ -94,7 +74,6 @@ print("demo files") data_files = ( - ("share/pmp/graphics/vcs", 
portrait_files), ( "share/pmp/graphics/png", [ @@ -132,35 +111,6 @@ ("share/pmp/demo", demo_files), ) -if install_dev: - print("Adding experimental packages") - dev_packages = glob.glob("src/python/devel/*") - dev_packages.remove("src/python/devel/example_dev") - for p in dev_packages: - if not os.path.isdir(p): - dev_packages.pop(p) - dev_scripts = [] - for p in dev_packages: - scripts = glob.glob(os.path.join(p, "scripts", "*")) - dev_scripts += scripts - dev_pkg = {} - dev_data = [] - for p in dev_packages: - nm = p.replace("/", ".") - nm = nm.replace("src.python.devel", "pcmdi_metrics") - pnm = nm.split(".")[-1] - pkg_dir = os.path.join(p, "lib") - dev_pkg[nm] = pkg_dir - data = glob.glob(os.path.join(p, "data", "*")) - for d in data: - dir_nm = os.path.split(d)[-1] - dev_data.append( - [os.path.join(dir_nm, pnm), glob.glob(os.path.join(d, "*"))] - ) - packages.update(dev_pkg) - data_files += dev_data - scripts += dev_scripts - setup( name="pcmdi_metrics", version=descr, @@ -171,14 +121,4 @@ scripts=scripts, data_files=data_files, entry_points=entry_points, - # include_dirs = [numpy.lib.utils.get_include()], - # ext_modules = [ - # Extension('pcmdi_metrics.exts', - # ['src/C/add.c',], - # library_dirs = [], - # libraries = [], - # define_macros = [], - # extra_compile_args = [], - # extra_link_args = [], - # ] ) From c655b1e106e3e1dbee45e3b9ea18cf7a7be68420 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 26 Oct 2022 17:38:02 -0700 Subject: [PATCH 022/130] clean up --- setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.py b/setup.py index 5c1462dd6..ca6ca730d 100755 --- a/setup.py +++ b/setup.py @@ -55,7 +55,6 @@ "pcmdi_metrics/misc/scripts/parallelize_driver.py", "pcmdi_metrics/misc/scripts/get_pmp_data.py", ] -# scripts += glob.glob("pcmdi_metrics/diurnal/scripts/*.py") entry_points = { "console_scripts": [ From 850a45a85db5ba4489f9b465e9333de5558f2eff Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 26 Oct 2022 17:39:33 -0700 Subject: [PATCH 023/130] rename mean climate directory --- .../{pcmdi_mean_climate => mean_climate}/__init__.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/__init__.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/annual_mean.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/bias_xy.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/bias_xyt.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/cor_xy.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/cor_xyt.py | 0 pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/io.py | 0 .../lib/mean_climate_metrics_calculations.py | 0 .../lib/mean_climate_metrics_driver.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/mean_xy.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/meanabs_xy.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/meanabs_xyt.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/pmp_parser.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/rms_0.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/rms_xy.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/rms_xyt.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/rmsc_xy.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/seasonal_mean.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/std_xy.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/std_xyt.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib/zonal_mean.py | 0 .../lib_driver/__init__.py | 0 .../lib_driver/dataset.py | 0 .../{pcmdi_mean_climate => mean_climate}/lib_driver/model.py | 0 .../lib_driver/observation.py | 0 .../lib_driver/outputmetrics.py | 0 
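Patches 021 and 022 above reduce setup.py to find_packages() plus an explicit scripts list. A stripped-down sketch of the resulting call, keeping only fields that appear in the diff; the literal version string stands in for the git-describe logic that the real file keeps.

from setuptools import find_packages, setup

# Automatic package discovery replaces the old hand-maintained package map.
packages = find_packages()

# Entry scripts stay explicit, as in the trimmed list above (abbreviated here).
scripts = [
    "pcmdi_metrics/pcmdi_mean_climate/pcmdi_compute_climatologies.py",
    "pcmdi_metrics/pcmdi_mean_climate/mean_climate_driver.py",
]

setup(
    name="pcmdi_metrics",
    version="2.5",  # placeholder; the real setup.py derives this from git describe
    packages=packages,
    scripts=scripts,
)
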
.../lib_driver/pmp_parameter.py | 0 .../lib_driver/pmp_parser.py | 0 .../mean_climate_driver.py | 0 .../param/basic_annual_cycle_param.py | 0 .../{pcmdi_mean_climate => mean_climate}/param/basic_param.py | 0 .../pcmdi_compute_climatologies.py | 0 .../scripts/build_obs_meta_dictionary.py | 0 .../scripts/make_obs_clim.py | 0 .../scripts/make_obs_clim.sh | 0 .../scripts/make_obs_sftlf.py | 0 .../scripts/obs_info_dictionary.json | 0 .../scripts/pcmdi_compute_climatologies-CMOR.py | 0 setup.py | 4 ++-- 40 files changed, 2 insertions(+), 2 deletions(-) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/__init__.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/__init__.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/annual_mean.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/bias_xy.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/bias_xyt.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/cor_xy.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/cor_xyt.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/io.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/mean_climate_metrics_calculations.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/mean_climate_metrics_driver.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/mean_xy.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/meanabs_xy.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/meanabs_xyt.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/pmp_parser.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/rms_0.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/rms_xy.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/rms_xyt.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/rmsc_xy.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/seasonal_mean.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/std_xy.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/std_xyt.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib/zonal_mean.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib_driver/__init__.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib_driver/dataset.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib_driver/model.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib_driver/observation.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib_driver/outputmetrics.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib_driver/pmp_parameter.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/lib_driver/pmp_parser.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/mean_climate_driver.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/param/basic_annual_cycle_param.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/param/basic_param.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/pcmdi_compute_climatologies.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/scripts/build_obs_meta_dictionary.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/scripts/make_obs_clim.py 
(100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/scripts/make_obs_clim.sh (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/scripts/make_obs_sftlf.py (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/scripts/obs_info_dictionary.json (100%) rename pcmdi_metrics/{pcmdi_mean_climate => mean_climate}/scripts/pcmdi_compute_climatologies-CMOR.py (100%) diff --git a/pcmdi_metrics/pcmdi_mean_climate/__init__.py b/pcmdi_metrics/mean_climate/__init__.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/__init__.py rename to pcmdi_metrics/mean_climate/__init__.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/__init__.py b/pcmdi_metrics/mean_climate/lib/__init__.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/__init__.py rename to pcmdi_metrics/mean_climate/lib/__init__.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/annual_mean.py b/pcmdi_metrics/mean_climate/lib/annual_mean.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/annual_mean.py rename to pcmdi_metrics/mean_climate/lib/annual_mean.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/bias_xy.py b/pcmdi_metrics/mean_climate/lib/bias_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/bias_xy.py rename to pcmdi_metrics/mean_climate/lib/bias_xy.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/bias_xyt.py b/pcmdi_metrics/mean_climate/lib/bias_xyt.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/bias_xyt.py rename to pcmdi_metrics/mean_climate/lib/bias_xyt.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/cor_xy.py b/pcmdi_metrics/mean_climate/lib/cor_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/cor_xy.py rename to pcmdi_metrics/mean_climate/lib/cor_xy.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/cor_xyt.py b/pcmdi_metrics/mean_climate/lib/cor_xyt.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/cor_xyt.py rename to pcmdi_metrics/mean_climate/lib/cor_xyt.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/io.py b/pcmdi_metrics/mean_climate/lib/io.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/io.py rename to pcmdi_metrics/mean_climate/lib/io.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/mean_climate_metrics_calculations.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/mean_climate_metrics_calculations.py rename to pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/mean_climate_metrics_driver.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/mean_climate_metrics_driver.py rename to pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/mean_xy.py b/pcmdi_metrics/mean_climate/lib/mean_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/mean_xy.py rename to pcmdi_metrics/mean_climate/lib/mean_xy.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/meanabs_xy.py b/pcmdi_metrics/mean_climate/lib/meanabs_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/meanabs_xy.py rename to pcmdi_metrics/mean_climate/lib/meanabs_xy.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/meanabs_xyt.py 
b/pcmdi_metrics/mean_climate/lib/meanabs_xyt.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/meanabs_xyt.py rename to pcmdi_metrics/mean_climate/lib/meanabs_xyt.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/pmp_parser.py b/pcmdi_metrics/mean_climate/lib/pmp_parser.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/pmp_parser.py rename to pcmdi_metrics/mean_climate/lib/pmp_parser.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/rms_0.py b/pcmdi_metrics/mean_climate/lib/rms_0.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/rms_0.py rename to pcmdi_metrics/mean_climate/lib/rms_0.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/rms_xy.py b/pcmdi_metrics/mean_climate/lib/rms_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/rms_xy.py rename to pcmdi_metrics/mean_climate/lib/rms_xy.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/rms_xyt.py b/pcmdi_metrics/mean_climate/lib/rms_xyt.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/rms_xyt.py rename to pcmdi_metrics/mean_climate/lib/rms_xyt.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/rmsc_xy.py b/pcmdi_metrics/mean_climate/lib/rmsc_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/rmsc_xy.py rename to pcmdi_metrics/mean_climate/lib/rmsc_xy.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/seasonal_mean.py b/pcmdi_metrics/mean_climate/lib/seasonal_mean.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/seasonal_mean.py rename to pcmdi_metrics/mean_climate/lib/seasonal_mean.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/std_xy.py b/pcmdi_metrics/mean_climate/lib/std_xy.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/std_xy.py rename to pcmdi_metrics/mean_climate/lib/std_xy.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/std_xyt.py b/pcmdi_metrics/mean_climate/lib/std_xyt.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/std_xyt.py rename to pcmdi_metrics/mean_climate/lib/std_xyt.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib/zonal_mean.py b/pcmdi_metrics/mean_climate/lib/zonal_mean.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib/zonal_mean.py rename to pcmdi_metrics/mean_climate/lib/zonal_mean.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib_driver/__init__.py b/pcmdi_metrics/mean_climate/lib_driver/__init__.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib_driver/__init__.py rename to pcmdi_metrics/mean_climate/lib_driver/__init__.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib_driver/dataset.py b/pcmdi_metrics/mean_climate/lib_driver/dataset.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib_driver/dataset.py rename to pcmdi_metrics/mean_climate/lib_driver/dataset.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib_driver/model.py b/pcmdi_metrics/mean_climate/lib_driver/model.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib_driver/model.py rename to pcmdi_metrics/mean_climate/lib_driver/model.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib_driver/observation.py b/pcmdi_metrics/mean_climate/lib_driver/observation.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib_driver/observation.py rename to pcmdi_metrics/mean_climate/lib_driver/observation.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib_driver/outputmetrics.py 
b/pcmdi_metrics/mean_climate/lib_driver/outputmetrics.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib_driver/outputmetrics.py rename to pcmdi_metrics/mean_climate/lib_driver/outputmetrics.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib_driver/pmp_parameter.py b/pcmdi_metrics/mean_climate/lib_driver/pmp_parameter.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib_driver/pmp_parameter.py rename to pcmdi_metrics/mean_climate/lib_driver/pmp_parameter.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/lib_driver/pmp_parser.py b/pcmdi_metrics/mean_climate/lib_driver/pmp_parser.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/lib_driver/pmp_parser.py rename to pcmdi_metrics/mean_climate/lib_driver/pmp_parser.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/mean_climate_driver.py rename to pcmdi_metrics/mean_climate/mean_climate_driver.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/param/basic_annual_cycle_param.py b/pcmdi_metrics/mean_climate/param/basic_annual_cycle_param.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/param/basic_annual_cycle_param.py rename to pcmdi_metrics/mean_climate/param/basic_annual_cycle_param.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/param/basic_param.py b/pcmdi_metrics/mean_climate/param/basic_param.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/param/basic_param.py rename to pcmdi_metrics/mean_climate/param/basic_param.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/pcmdi_compute_climatologies.py b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/pcmdi_compute_climatologies.py rename to pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/scripts/build_obs_meta_dictionary.py b/pcmdi_metrics/mean_climate/scripts/build_obs_meta_dictionary.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/scripts/build_obs_meta_dictionary.py rename to pcmdi_metrics/mean_climate/scripts/build_obs_meta_dictionary.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/scripts/make_obs_clim.py b/pcmdi_metrics/mean_climate/scripts/make_obs_clim.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/scripts/make_obs_clim.py rename to pcmdi_metrics/mean_climate/scripts/make_obs_clim.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/scripts/make_obs_clim.sh b/pcmdi_metrics/mean_climate/scripts/make_obs_clim.sh similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/scripts/make_obs_clim.sh rename to pcmdi_metrics/mean_climate/scripts/make_obs_clim.sh diff --git a/pcmdi_metrics/pcmdi_mean_climate/scripts/make_obs_sftlf.py b/pcmdi_metrics/mean_climate/scripts/make_obs_sftlf.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/scripts/make_obs_sftlf.py rename to pcmdi_metrics/mean_climate/scripts/make_obs_sftlf.py diff --git a/pcmdi_metrics/pcmdi_mean_climate/scripts/obs_info_dictionary.json b/pcmdi_metrics/mean_climate/scripts/obs_info_dictionary.json similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/scripts/obs_info_dictionary.json rename to pcmdi_metrics/mean_climate/scripts/obs_info_dictionary.json diff --git a/pcmdi_metrics/pcmdi_mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py 
b/pcmdi_metrics/mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py similarity index 100% rename from pcmdi_metrics/pcmdi_mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py rename to pcmdi_metrics/mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py diff --git a/setup.py b/setup.py index ca6ca730d..cae53b1ba 100755 --- a/setup.py +++ b/setup.py @@ -44,8 +44,8 @@ packages = find_packages() scripts = [ - "pcmdi_metrics/pcmdi_mean_climate/pcmdi_compute_climatologies.py", - "pcmdi_metrics/pcmdi_mean_climate/mean_climate_driver.py", + "pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py", + "pcmdi_metrics/mean_climate/mean_climate_driver.py", "pcmdi_metrics/monsoon_wang/scripts/mpindex_compute.py", "pcmdi_metrics/monsoon_sperber/scripts/driver_monsoon_sperber.py", "pcmdi_metrics/mjo/mjo_metrics_driver.py", From bf91c8b2bfc112dd528f7a3110b9b87b2cd26c29 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Wed, 26 Oct 2022 17:50:40 -0700 Subject: [PATCH 024/130] change import accordingly to the reorganized directories --- .../lib/mean_climate_metrics_calculations.py | 104 +++++++++--------- .../lib/mean_climate_metrics_driver.py | 18 +-- pcmdi_metrics/mean_climate/lib/pmp_parser.py | 4 +- .../mean_climate/lib_driver/model.py | 4 +- .../mean_climate/lib_driver/observation.py | 2 +- .../mean_climate/lib_driver/outputmetrics.py | 4 +- .../mean_climate/lib_driver/pmp_parser.py | 6 +- 7 files changed, 71 insertions(+), 71 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py index 764857c14..fd8fa95cb 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py @@ -13,26 +13,26 @@ def compute_metrics(Var, dm, do): # Did we send data? Or do we just want the info? 
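# ---------------------------------------------------------------------
# Illustrative sketch (not part of the diff): this commit only repoints
# the statistics imports, so the existing one-module-per-metric helpers
# are now addressed as pcmdi_metrics.mean_climate.lib.<metric> instead
# of pcmdi_metrics.pcmdi.<metric>.  The function name and dm/do below are
# assumed placeholders for 12-month model and observation climatologies
# (cdms2 transient variables on a common grid):
from pcmdi_metrics.mean_climate.lib import rms_xyt


def example_rms_xyt(dm, do):
    # space-time RMS of the annual cycle, via the relocated module
    return rms_xyt.compute(dm, do)
# ---------------------------------------------------------------------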
if dm is None and do is None: metrics_defs = collections.OrderedDict() - metrics_defs["rms_xyt"] = pcmdi_metrics.pcmdi.rms_xyt.compute(None, None) - metrics_defs["rms_xy"] = pcmdi_metrics.pcmdi.rms_xy.compute(None, None) - metrics_defs["rmsc_xy"] = pcmdi_metrics.pcmdi.rmsc_xy.compute(None, None) - metrics_defs["bias_xy"] = pcmdi_metrics.pcmdi.bias_xy.compute(None, None) - metrics_defs["mae_xy"] = pcmdi_metrics.pcmdi.meanabs_xy.compute(None, None) - # metrics_defs["cor_xyt"] = pcmdi_metrics.pcmdi.cor_xyt.compute( + metrics_defs["rms_xyt"] = pcmdi_metrics.mean_climate.lib.rms_xyt.compute(None, None) + metrics_defs["rms_xy"] = pcmdi_metrics.mean_climate.lib.rms_xy.compute(None, None) + metrics_defs["rmsc_xy"] = pcmdi_metrics.mean_climate.lib.rmsc_xy.compute(None, None) + metrics_defs["bias_xy"] = pcmdi_metrics.mean_climate.lib.bias_xy.compute(None, None) + metrics_defs["mae_xy"] = pcmdi_metrics.mean_climate.lib.meanabs_xy.compute(None, None) + # metrics_defs["cor_xyt"] = pcmdi_metrics.mean_climate.lib.cor_xyt.compute( # None, # None) - metrics_defs["cor_xy"] = pcmdi_metrics.pcmdi.cor_xy.compute(None, None) - metrics_defs["mean_xy"] = pcmdi_metrics.pcmdi.mean_xy.compute(None) - metrics_defs["std_xy"] = pcmdi_metrics.pcmdi.std_xy.compute(None) - metrics_defs["std_xyt"] = pcmdi_metrics.pcmdi.std_xyt.compute(None) + metrics_defs["cor_xy"] = pcmdi_metrics.mean_climate.lib.cor_xy.compute(None, None) + metrics_defs["mean_xy"] = pcmdi_metrics.mean_climate.lib.mean_xy.compute(None) + metrics_defs["std_xy"] = pcmdi_metrics.mean_climate.lib.std_xy.compute(None) + metrics_defs["std_xyt"] = pcmdi_metrics.mean_climate.lib.std_xyt.compute(None) - metrics_defs["seasonal_mean"] = pcmdi_metrics.pcmdi.seasonal_mean.compute( + metrics_defs["seasonal_mean"] = pcmdi_metrics.mean_climate.lib.seasonal_mean.compute( None, None ) - metrics_defs["annual_mean"] = pcmdi_metrics.pcmdi.annual_mean.compute( + metrics_defs["annual_mean"] = pcmdi_metrics.mean_climate.lib.annual_mean.compute( None, None ) - metrics_defs["zonal_mean"] = pcmdi_metrics.pcmdi.zonal_mean.compute(None, None) + metrics_defs["zonal_mean"] = pcmdi_metrics.mean_climate.lib.zonal_mean.compute(None, None) return metrics_defs cdms.setAutoBounds("on") metrics_dictionary = {} @@ -49,54 +49,54 @@ def compute_metrics(Var, dm, do): sig_digits = ".3f" # CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD - rms_xyt = pcmdi_metrics.pcmdi.rms_xyt.compute(dm, do) - # cor_xyt = pcmdi_metrics.pcmdi.cor_xyt.compute(dm, do) - stdObs_xyt = pcmdi_metrics.pcmdi.std_xyt.compute(do) - std_xyt = pcmdi_metrics.pcmdi.std_xyt.compute(dm) + rms_xyt = pcmdi_metrics.mean_climate.lib.rms_xyt.compute(dm, do) + # cor_xyt = pcmdi_metrics.mean_climate.lib.cor_xyt.compute(dm, do) + stdObs_xyt = pcmdi_metrics.mean_climate.lib.std_xyt.compute(do) + std_xyt = pcmdi_metrics.mean_climate.lib.std_xyt.compute(dm) # CALCULATE ANNUAL MEANS - dm_am, do_am = pcmdi_metrics.pcmdi.annual_mean.compute(dm, do) + dm_am, do_am = pcmdi_metrics.mean_climate.lib.annual_mean.compute(dm, do) # CALCULATE ANNUAL MEAN BIAS - bias_xy = pcmdi_metrics.pcmdi.bias_xy.compute(dm_am, do_am) + bias_xy = pcmdi_metrics.mean_climate.lib.bias_xy.compute(dm_am, do_am) # CALCULATE MEAN ABSOLUTE ERROR - mae_xy = pcmdi_metrics.pcmdi.meanabs_xy.compute(dm_am, do_am) + mae_xy = pcmdi_metrics.mean_climate.lib.meanabs_xy.compute(dm_am, do_am) # CALCULATE ANNUAL MEAN RMS (centered and uncentered) - rms_xy = pcmdi_metrics.pcmdi.rms_xy.compute(dm_am, do_am) - rmsc_xy = pcmdi_metrics.pcmdi.rmsc_xy.compute(dm_am, do_am) + rms_xy 
= pcmdi_metrics.mean_climate.lib.rms_xy.compute(dm_am, do_am) + rmsc_xy = pcmdi_metrics.mean_climate.lib.rmsc_xy.compute(dm_am, do_am) # CALCULATE ANNUAL MEAN CORRELATION - cor_xy = pcmdi_metrics.pcmdi.cor_xy.compute(dm_am, do_am) + cor_xy = pcmdi_metrics.mean_climate.lib.cor_xy.compute(dm_am, do_am) # CALCULATE ANNUAL OBS and MOD STD - stdObs_xy = pcmdi_metrics.pcmdi.std_xy.compute(do_am) - std_xy = pcmdi_metrics.pcmdi.std_xy.compute(dm_am) + stdObs_xy = pcmdi_metrics.mean_climate.lib.std_xy.compute(do_am) + std_xy = pcmdi_metrics.mean_climate.lib.std_xy.compute(dm_am) # CALCULATE ANNUAL OBS and MOD MEAN - meanObs_xy = pcmdi_metrics.pcmdi.mean_xy.compute(do_am) - mean_xy = pcmdi_metrics.pcmdi.mean_xy.compute(dm_am) + meanObs_xy = pcmdi_metrics.mean_climate.lib.mean_xy.compute(do_am) + mean_xy = pcmdi_metrics.mean_climate.lib.mean_xy.compute(dm_am) # ZONAL MEANS ###### # CALCULATE ANNUAL MEANS - dm_amzm, do_amzm = pcmdi_metrics.pcmdi.zonal_mean.compute(dm_am, do_am) + dm_amzm, do_amzm = pcmdi_metrics.mean_climate.lib.zonal_mean.compute(dm_am, do_am) # CALCULATE ANNUAL AND ZONAL MEAN RMS - rms_y = pcmdi_metrics.pcmdi.rms_0.compute(dm_amzm, do_amzm) + rms_y = pcmdi_metrics.mean_climate.lib.rms_0.compute(dm_amzm, do_amzm) # CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS dm_amzm_grown, dummy = grower(dm_amzm, dm_am) dm_am_devzm = MV2.subtract(dm_am, dm_amzm_grown) do_amzm_grown, dummy = grower(do_amzm, do_am) do_am_devzm = MV2.subtract(do_am, do_amzm_grown) - rms_xy_devzm = pcmdi_metrics.pcmdi.rms_xy.compute(dm_am_devzm, do_am_devzm) + rms_xy_devzm = pcmdi_metrics.mean_climate.lib.rms_xy.compute(dm_am_devzm, do_am_devzm) # CALCULATE ANNUAL AND ZONAL MEAN STD # CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD - stdObs_xy_devzm = pcmdi_metrics.pcmdi.std_xy.compute(do_am_devzm) - std_xy_devzm = pcmdi_metrics.pcmdi.std_xy.compute(dm_am_devzm) + stdObs_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy.compute(do_am_devzm) + std_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy.compute(dm_am_devzm) for stat in [ "std-obs_xy", @@ -141,23 +141,23 @@ def compute_metrics(Var, dm, do): # CALCULATE SEASONAL MEANS for sea in ["djf", "mam", "jja", "son"]: - dm_sea = pcmdi_metrics.pcmdi.seasonal_mean.compute(dm, sea) - do_sea = pcmdi_metrics.pcmdi.seasonal_mean.compute(do, sea) + dm_sea = pcmdi_metrics.mean_climate.lib.seasonal_mean.compute(dm, sea) + do_sea = pcmdi_metrics.mean_climate.lib.seasonal_mean.compute(do, sea) # CALCULATE SEASONAL RMS AND CORRELATION - rms_sea = pcmdi_metrics.pcmdi.rms_xy.compute(dm_sea, do_sea) - rmsc_sea = pcmdi_metrics.pcmdi.rmsc_xy.compute(dm_sea, do_sea) - cor_sea = pcmdi_metrics.pcmdi.cor_xy.compute(dm_sea, do_sea) - mae_sea = pcmdi_metrics.pcmdi.meanabs_xy.compute(dm_sea, do_sea) - bias_sea = pcmdi_metrics.pcmdi.bias_xy.compute(dm_sea, do_sea) + rms_sea = pcmdi_metrics.mean_climate.lib.rms_xy.compute(dm_sea, do_sea) + rmsc_sea = pcmdi_metrics.mean_climate.lib.rmsc_xy.compute(dm_sea, do_sea) + cor_sea = pcmdi_metrics.mean_climate.lib.cor_xy.compute(dm_sea, do_sea) + mae_sea = pcmdi_metrics.mean_climate.lib.meanabs_xy.compute(dm_sea, do_sea) + bias_sea = pcmdi_metrics.mean_climate.lib.bias_xy.compute(dm_sea, do_sea) # CALCULATE SEASONAL OBS and MOD STD - stdObs_xy_sea = pcmdi_metrics.pcmdi.std_xy.compute(do_sea) - std_xy_sea = pcmdi_metrics.pcmdi.std_xy.compute(dm_sea) + stdObs_xy_sea = pcmdi_metrics.mean_climate.lib.std_xy.compute(do_sea) + std_xy_sea = pcmdi_metrics.mean_climate.lib.std_xy.compute(dm_sea) # CALCULATE SEASONAL OBS and MOD MEAN - meanObs_xy_sea = 
pcmdi_metrics.pcmdi.mean_xy.compute(do_sea) - mean_xy_sea = pcmdi_metrics.pcmdi.mean_xy.compute(dm_sea) + meanObs_xy_sea = pcmdi_metrics.mean_climate.lib.mean_xy.compute(do_sea) + mean_xy_sea = pcmdi_metrics.mean_climate.lib.mean_xy.compute(dm_sea) metrics_dictionary["bias_xy"][sea] = format(bias_sea * conv, sig_digits) metrics_dictionary["rms_xy"][sea] = format(rms_sea * conv, sig_digits) @@ -201,19 +201,19 @@ def compute_metrics(Var, dm, do): do_mo = do[n] # CALCULATE MONTHLY RMS AND CORRELATION - rms_mo = pcmdi_metrics.pcmdi.rms_xy.compute(dm_mo, do_mo) - rmsc_mo = pcmdi_metrics.pcmdi.rmsc_xy.compute(dm_mo, do_mo) - cor_mo = pcmdi_metrics.pcmdi.cor_xy.compute(dm_mo, do_mo) - mae_mo = pcmdi_metrics.pcmdi.meanabs_xy.compute(dm_mo, do_mo) - bias_mo = pcmdi_metrics.pcmdi.bias_xy.compute(dm_mo, do_mo) + rms_mo = pcmdi_metrics.mean_climate.lib.rms_xy.compute(dm_mo, do_mo) + rmsc_mo = pcmdi_metrics.mean_climate.lib.rmsc_xy.compute(dm_mo, do_mo) + cor_mo = pcmdi_metrics.mean_climate.lib.cor_xy.compute(dm_mo, do_mo) + mae_mo = pcmdi_metrics.mean_climate.lib.meanabs_xy.compute(dm_mo, do_mo) + bias_mo = pcmdi_metrics.mean_climate.lib.bias_xy.compute(dm_mo, do_mo) # CALCULATE MONTHLY OBS and MOD STD - stdObs_xy_mo = pcmdi_metrics.pcmdi.std_xy.compute(do_mo) - std_xy_mo = pcmdi_metrics.pcmdi.std_xy.compute(dm_mo) + stdObs_xy_mo = pcmdi_metrics.mean_climate.lib.std_xy.compute(do_mo) + std_xy_mo = pcmdi_metrics.mean_climate.lib.std_xy.compute(dm_mo) # CALCULATE MONTHLY OBS and MOD MEAN - meanObs_xy_mo = pcmdi_metrics.pcmdi.mean_xy.compute(do_mo) - mean_xy_mo = pcmdi_metrics.pcmdi.mean_xy.compute(dm_mo) + meanObs_xy_mo = pcmdi_metrics.mean_climate.lib.mean_xy.compute(do_mo) + mean_xy_mo = pcmdi_metrics.mean_climate.lib.mean_xy.compute(dm_mo) rms_mo_l.append(format(rms_mo * conv, sig_digits)) rmsc_mo_l.append(format(rmsc_mo * conv, sig_digits)) diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py index 82f6a187a..b53f32be3 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py @@ -3,12 +3,12 @@ import json import logging -import pcmdi_metrics.driver.dataset -import pcmdi_metrics.driver.pmp_parser +import pcmdi_metrics.mean_climate.lib_driver.dataset +import pcmdi_metrics.mean_climate.lib_driver.pmp_parser from pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.driver.model import Model -from pcmdi_metrics.driver.observation import Observation -from pcmdi_metrics.driver.outputmetrics import OutputMetrics +from pcmdi_metrics.mean_climate.lib_driver.model import Model +from pcmdi_metrics.mean_climate.lib_driver.observation import Observation +from pcmdi_metrics.mean_climate.lib_driver.outputmetrics import OutputMetrics class PMPDriver(object): @@ -37,7 +37,7 @@ def __init__(self, parameter): self.var = "" self.output_metric = None self.region = "" - self.sftlf = pcmdi_metrics.driver.dataset.DataSet.create_sftlf(self.parameter) + self.sftlf = pcmdi_metrics.mean_climate.lib_driver.dataset.DataSet.create_sftlf(self.parameter) self.default_regions = [] self.regions_specs = {} @@ -67,7 +67,7 @@ def load_obs_dict(self): """Loads obs_info_dictionary.json and appends custom_observations from the parameter file if needed.""" obs_file_name = "obs_info_dictionary.json" - obs_json_file = pcmdi_metrics.driver.dataset.DataSet.load_path_as_file_obj( + obs_json_file = pcmdi_metrics.mean_climate.lib_driver.dataset.DataSet.load_path_as_file_obj( 
obs_file_name ) obs_dict = json.loads(obs_json_file.read()) @@ -103,7 +103,7 @@ def load_default_regions_and_regions_specs(self): """Gets the default_regions dict and regions_specs dict from default_regions.py and stores them as attributes.""" default_regions_file = ( - pcmdi_metrics.driver.dataset.DataSet.load_path_as_file_obj( + pcmdi_metrics.mean_climate.lib_driver.dataset.DataSet.load_path_as_file_obj( "default_regions.py" ) ) @@ -269,7 +269,7 @@ def determine_obs_or_model(self, is_obs, ref_or_test, data_path): def create_mean_climate_parser(): - parser = pcmdi_metrics.driver.pmp_parser.PMPMetricsParser() + parser = pcmdi_metrics.mean_climate.lib_driver.pmp_parser.PMPMetricsParser() parser.add_argument( "--case_id", dest="case_id", diff --git a/pcmdi_metrics/mean_climate/lib/pmp_parser.py b/pcmdi_metrics/mean_climate/lib/pmp_parser.py index 9442ee143..37fb47f9a 100644 --- a/pcmdi_metrics/mean_climate/lib/pmp_parser.py +++ b/pcmdi_metrics/mean_climate/lib/pmp_parser.py @@ -1,4 +1,4 @@ -import pcmdi_metrics.driver.pmp_parser as pmp_parser +import pcmdi_metrics.mean_climate.lib_driver.pmp_parser as pmp_parser class PMPParser(pmp_parser.PMPParser): @@ -7,5 +7,5 @@ def __init__(self, warning=True, *args, **kwargs): super(PMPParser, self).__init__(*args, **kwargs) if warning: print( - "Deprecation warning: please use 'import pcmdi_metrics.driver.pmp_parser.PMPParser'" + "Deprecation warning: please use 'import pcmdi_metrics.mean_climate.lib_driver.pmp_parser.PMPParser'" ) diff --git a/pcmdi_metrics/mean_climate/lib_driver/model.py b/pcmdi_metrics/mean_climate/lib_driver/model.py index 7a43becba..1bafbffab 100644 --- a/pcmdi_metrics/mean_climate/lib_driver/model.py +++ b/pcmdi_metrics/mean_climate/lib_driver/model.py @@ -5,12 +5,12 @@ import cdutil import MV2 -import pcmdi_metrics.driver.dataset +import pcmdi_metrics.mean_climate.lib_driver.dataset from pcmdi_metrics import LOG_LEVEL from pcmdi_metrics.io.base import Base -class Model(pcmdi_metrics.driver.dataset.DataSet): +class Model(pcmdi_metrics.mean_climate.lib_driver.dataset.DataSet): """Handles all the computation (setting masking, target grid, etc) and some file I/O related to models.""" diff --git a/pcmdi_metrics/mean_climate/lib_driver/observation.py b/pcmdi_metrics/mean_climate/lib_driver/observation.py index 99806e8df..f9a2cf486 100644 --- a/pcmdi_metrics/mean_climate/lib_driver/observation.py +++ b/pcmdi_metrics/mean_climate/lib_driver/observation.py @@ -3,7 +3,7 @@ import MV2 from pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.driver.dataset import DataSet +from pcmdi_metrics.mean_climate.lib_driver.dataset import DataSet from pcmdi_metrics.io.base import Base try: diff --git a/pcmdi_metrics/mean_climate/lib_driver/outputmetrics.py b/pcmdi_metrics/mean_climate/lib_driver/outputmetrics.py index 1518dc610..5a8443faa 100644 --- a/pcmdi_metrics/mean_climate/lib_driver/outputmetrics.py +++ b/pcmdi_metrics/mean_climate/lib_driver/outputmetrics.py @@ -6,8 +6,8 @@ import pcmdi_metrics from pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.driver.dataset import DataSet -from pcmdi_metrics.driver.observation import Observation +from pcmdi_metrics.mean_climate.lib_driver.dataset import DataSet +from pcmdi_metrics.mean_climate.lib_driver.observation import Observation from pcmdi_metrics.io.base import Base try: diff --git a/pcmdi_metrics/mean_climate/lib_driver/pmp_parser.py b/pcmdi_metrics/mean_climate/lib_driver/pmp_parser.py index 1f62d4d20..12f67231a 100644 --- a/pcmdi_metrics/mean_climate/lib_driver/pmp_parser.py +++ 
b/pcmdi_metrics/mean_climate/lib_driver/pmp_parser.py @@ -2,7 +2,7 @@ import cdp.cdp_parser -import pcmdi_metrics.driver.pmp_parameter +import pcmdi_metrics.mean_climate.lib_driver.pmp_parameter from pcmdi_metrics import resources try: @@ -21,7 +21,7 @@ def path_to_default_args(): class PMPParser(cdp.cdp_parser.CDPParser): def __init__(self, *args, **kwargs): super(PMPParser, self).__init__( - pcmdi_metrics.driver.pmp_parameter.PMPParameter, + pcmdi_metrics.mean_climate.lib_driver.pmp_parameter.PMPParameter, path_to_default_args(), *args, **kwargs, @@ -33,7 +33,7 @@ def __init__(self, *args, **kwargs): class PMPMetricsParser(cdp.cdp_parser.CDPParser): def __init__(self, *args, **kwargs): super(PMPMetricsParser, self).__init__( - pcmdi_metrics.driver.pmp_parameter.PMPMetricsParameter, + pcmdi_metrics.mean_climate.lib_driver.pmp_parameter.PMPMetricsParameter, path_to_default_args(), *args, **kwargs, From 14d6a5a83b0dcbb0e068fe8117d0db52f7a07f20 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Wed, 26 Oct 2022 21:33:13 -0700 Subject: [PATCH 025/130] clean up --- pcmdi_metrics/version.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index fe3d9624b..25f5604d3 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-26-gacc86e2' -__git_sha1__ = 'acc86e23aa5caf8bf38e3a170b8eef3785edf794' +__git_tag_describe__ = 'v2.3.1-33-gbf91c8b2' +__git_sha1__ = 'bf91c8b2bfc112dd528f7a3110b9b87b2cd26c29' From 98e7e407c9257941e6f1574ebb1eb8329e78cb21 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Wed, 26 Oct 2022 21:43:26 -0700 Subject: [PATCH 026/130] merge lib directories --- .../{lib_driver => lib}/dataset.py | 0 .../lib/mean_climate_metrics_driver.py | 18 +++---- .../mean_climate/{lib_driver => lib}/model.py | 4 +- .../{lib_driver => lib}/observation.py | 2 +- .../{lib_driver => lib}/outputmetrics.py | 4 +- .../{lib_driver => lib}/pmp_parameter.py | 0 pcmdi_metrics/mean_climate/lib/pmp_parser.py | 49 +++++++++++++++---- .../mean_climate/lib_driver/__init__.py | 0 .../mean_climate/lib_driver/pmp_parser.py | 42 ---------------- .../pcmdi_compute_climatologies.py | 2 +- .../pcmdi_compute_climatologies-CMOR.py | 2 +- 11 files changed, 56 insertions(+), 67 deletions(-) rename pcmdi_metrics/mean_climate/{lib_driver => lib}/dataset.py (100%) rename pcmdi_metrics/mean_climate/{lib_driver => lib}/model.py (97%) rename pcmdi_metrics/mean_climate/{lib_driver => lib}/observation.py (98%) rename pcmdi_metrics/mean_climate/{lib_driver => lib}/outputmetrics.py (99%) rename pcmdi_metrics/mean_climate/{lib_driver => lib}/pmp_parameter.py (100%) delete mode 100644 pcmdi_metrics/mean_climate/lib_driver/__init__.py delete mode 100644 pcmdi_metrics/mean_climate/lib_driver/pmp_parser.py diff --git a/pcmdi_metrics/mean_climate/lib_driver/dataset.py b/pcmdi_metrics/mean_climate/lib/dataset.py similarity index 100% rename from pcmdi_metrics/mean_climate/lib_driver/dataset.py rename to pcmdi_metrics/mean_climate/lib/dataset.py diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py index b53f32be3..6ac52fd39 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py @@ -3,12 +3,12 @@ import json import logging -import pcmdi_metrics.mean_climate.lib_driver.dataset -import 
pcmdi_metrics.mean_climate.lib_driver.pmp_parser +import pcmdi_metrics.mean_climate.lib.dataset +import pcmdi_metrics.mean_climate.lib.pmp_parser from pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.mean_climate.lib_driver.model import Model -from pcmdi_metrics.mean_climate.lib_driver.observation import Observation -from pcmdi_metrics.mean_climate.lib_driver.outputmetrics import OutputMetrics +from pcmdi_metrics.mean_climate.lib.model import Model +from pcmdi_metrics.mean_climate.lib.observation import Observation +from pcmdi_metrics.mean_climate.lib.outputmetrics import OutputMetrics class PMPDriver(object): @@ -37,7 +37,7 @@ def __init__(self, parameter): self.var = "" self.output_metric = None self.region = "" - self.sftlf = pcmdi_metrics.mean_climate.lib_driver.dataset.DataSet.create_sftlf(self.parameter) + self.sftlf = pcmdi_metrics.mean_climate.lib.dataset.DataSet.create_sftlf(self.parameter) self.default_regions = [] self.regions_specs = {} @@ -67,7 +67,7 @@ def load_obs_dict(self): """Loads obs_info_dictionary.json and appends custom_observations from the parameter file if needed.""" obs_file_name = "obs_info_dictionary.json" - obs_json_file = pcmdi_metrics.mean_climate.lib_driver.dataset.DataSet.load_path_as_file_obj( + obs_json_file = pcmdi_metrics.mean_climate.lib.dataset.DataSet.load_path_as_file_obj( obs_file_name ) obs_dict = json.loads(obs_json_file.read()) @@ -103,7 +103,7 @@ def load_default_regions_and_regions_specs(self): """Gets the default_regions dict and regions_specs dict from default_regions.py and stores them as attributes.""" default_regions_file = ( - pcmdi_metrics.mean_climate.lib_driver.dataset.DataSet.load_path_as_file_obj( + pcmdi_metrics.mean_climate.lib.dataset.DataSet.load_path_as_file_obj( "default_regions.py" ) ) @@ -269,7 +269,7 @@ def determine_obs_or_model(self, is_obs, ref_or_test, data_path): def create_mean_climate_parser(): - parser = pcmdi_metrics.mean_climate.lib_driver.pmp_parser.PMPMetricsParser() + parser = pcmdi_metrics.mean_climate.lib.pmp_parser.PMPMetricsParser() parser.add_argument( "--case_id", dest="case_id", diff --git a/pcmdi_metrics/mean_climate/lib_driver/model.py b/pcmdi_metrics/mean_climate/lib/model.py similarity index 97% rename from pcmdi_metrics/mean_climate/lib_driver/model.py rename to pcmdi_metrics/mean_climate/lib/model.py index 1bafbffab..2588fd479 100644 --- a/pcmdi_metrics/mean_climate/lib_driver/model.py +++ b/pcmdi_metrics/mean_climate/lib/model.py @@ -5,12 +5,12 @@ import cdutil import MV2 -import pcmdi_metrics.mean_climate.lib_driver.dataset +import pcmdi_metrics.mean_climate.lib.dataset from pcmdi_metrics import LOG_LEVEL from pcmdi_metrics.io.base import Base -class Model(pcmdi_metrics.mean_climate.lib_driver.dataset.DataSet): +class Model(pcmdi_metrics.mean_climate.lib.dataset.DataSet): """Handles all the computation (setting masking, target grid, etc) and some file I/O related to models.""" diff --git a/pcmdi_metrics/mean_climate/lib_driver/observation.py b/pcmdi_metrics/mean_climate/lib/observation.py similarity index 98% rename from pcmdi_metrics/mean_climate/lib_driver/observation.py rename to pcmdi_metrics/mean_climate/lib/observation.py index f9a2cf486..001889641 100644 --- a/pcmdi_metrics/mean_climate/lib_driver/observation.py +++ b/pcmdi_metrics/mean_climate/lib/observation.py @@ -3,7 +3,7 @@ import MV2 from pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.mean_climate.lib_driver.dataset import DataSet +from pcmdi_metrics.mean_climate.lib.dataset import DataSet from pcmdi_metrics.io.base import 
Base try: diff --git a/pcmdi_metrics/mean_climate/lib_driver/outputmetrics.py b/pcmdi_metrics/mean_climate/lib/outputmetrics.py similarity index 99% rename from pcmdi_metrics/mean_climate/lib_driver/outputmetrics.py rename to pcmdi_metrics/mean_climate/lib/outputmetrics.py index 5a8443faa..97718ff39 100644 --- a/pcmdi_metrics/mean_climate/lib_driver/outputmetrics.py +++ b/pcmdi_metrics/mean_climate/lib/outputmetrics.py @@ -6,8 +6,8 @@ import pcmdi_metrics from pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.mean_climate.lib_driver.dataset import DataSet -from pcmdi_metrics.mean_climate.lib_driver.observation import Observation +from pcmdi_metrics.mean_climate.lib.dataset import DataSet +from pcmdi_metrics.mean_climate.lib.observation import Observation from pcmdi_metrics.io.base import Base try: diff --git a/pcmdi_metrics/mean_climate/lib_driver/pmp_parameter.py b/pcmdi_metrics/mean_climate/lib/pmp_parameter.py similarity index 100% rename from pcmdi_metrics/mean_climate/lib_driver/pmp_parameter.py rename to pcmdi_metrics/mean_climate/lib/pmp_parameter.py diff --git a/pcmdi_metrics/mean_climate/lib/pmp_parser.py b/pcmdi_metrics/mean_climate/lib/pmp_parser.py index 37fb47f9a..249496e75 100644 --- a/pcmdi_metrics/mean_climate/lib/pmp_parser.py +++ b/pcmdi_metrics/mean_climate/lib/pmp_parser.py @@ -1,11 +1,42 @@ -import pcmdi_metrics.mean_climate.lib_driver.pmp_parser as pmp_parser +import os +import cdp.cdp_parser -class PMPParser(pmp_parser.PMPParser): - def __init__(self, warning=True, *args, **kwargs): - # conflict_handler='resolve' lets new args override older ones - super(PMPParser, self).__init__(*args, **kwargs) - if warning: - print( - "Deprecation warning: please use 'import pcmdi_metrics.mean_climate.lib_driver.pmp_parser.PMPParser'" - ) +import pcmdi_metrics.mean_climate.lib.pmp_parameter +from pcmdi_metrics import resources + +try: + basestring # noqa +except Exception: + basestring = str + + +def path_to_default_args(): + """Returns path to Default Common Input Arguments in package egg.""" + egg_pth = resources.resource_path() + file_path = os.path.join(egg_pth, "DefArgsCIA.json") + return file_path + + +class PMPParser(cdp.cdp_parser.CDPParser): + def __init__(self, *args, **kwargs): + super(PMPParser, self).__init__( + pcmdi_metrics.mean_climate.lib.pmp_parameter.PMPParameter, + path_to_default_args(), + *args, + **kwargs, + ) + self.use("parameters") + self.use("diags") + + +class PMPMetricsParser(cdp.cdp_parser.CDPParser): + def __init__(self, *args, **kwargs): + super(PMPMetricsParser, self).__init__( + pcmdi_metrics.mean_climate.lib.pmp_parameter.PMPMetricsParameter, + path_to_default_args(), + *args, + **kwargs, + ) + self.use("parameters") + self.use("diags") diff --git a/pcmdi_metrics/mean_climate/lib_driver/__init__.py b/pcmdi_metrics/mean_climate/lib_driver/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/pcmdi_metrics/mean_climate/lib_driver/pmp_parser.py b/pcmdi_metrics/mean_climate/lib_driver/pmp_parser.py deleted file mode 100644 index 12f67231a..000000000 --- a/pcmdi_metrics/mean_climate/lib_driver/pmp_parser.py +++ /dev/null @@ -1,42 +0,0 @@ -import os - -import cdp.cdp_parser - -import pcmdi_metrics.mean_climate.lib_driver.pmp_parameter -from pcmdi_metrics import resources - -try: - basestring # noqa -except Exception: - basestring = str - - -def path_to_default_args(): - """Returns path to Default Common Input Arguments in package egg.""" - egg_pth = resources.resource_path() - file_path = os.path.join(egg_pth, "DefArgsCIA.json") 
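# ---------------------------------------------------------------------
# Illustrative sketch (not part of the diff): with the parser classes now
# defined directly in mean_climate/lib/pmp_parser.py and lib_driver/
# removed, a driver script builds its argument parser from the merged
# location.  The "--vars" option is only an example argument:
from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPMetricsParser

P = PMPMetricsParser()
P.add_argument(
    "--vars", dest="vars", help="List of variables", nargs="+", required=False
)
args = P.get_parameter()  # collects parameters the usual CDPParser way
# ---------------------------------------------------------------------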
- return file_path - - -class PMPParser(cdp.cdp_parser.CDPParser): - def __init__(self, *args, **kwargs): - super(PMPParser, self).__init__( - pcmdi_metrics.mean_climate.lib_driver.pmp_parameter.PMPParameter, - path_to_default_args(), - *args, - **kwargs, - ) - self.use("parameters") - self.use("diags") - - -class PMPMetricsParser(cdp.cdp_parser.CDPParser): - def __init__(self, *args, **kwargs): - super(PMPMetricsParser, self).__init__( - pcmdi_metrics.mean_climate.lib_driver.pmp_parameter.PMPMetricsParameter, - path_to_default_args(), - *args, - **kwargs, - ) - self.use("parameters") - self.use("diags") diff --git a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py index 148ed962d..e16444444 100644 --- a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py +++ b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py @@ -103,7 +103,7 @@ def clim_calc(var, infile, outfile=None, outpath=None, outfilename=None, start=N ver = datetime.datetime.now().strftime("v%Y%m%d") - P = pcmdi_metrics.driver.pmp_parser.PMPMetricsParser() + P = pcmdi_metrics.mean_climate.lib.pmp_parser.PMPMetricsParser() P.add_argument( "--vars", dest="vars", help="List of variables", nargs="+", required=False diff --git a/pcmdi_metrics/mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py b/pcmdi_metrics/mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py index 954c6203c..03e2aecd0 100644 --- a/pcmdi_metrics/mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py +++ b/pcmdi_metrics/mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py @@ -12,7 +12,7 @@ import genutil import numpy -from pcmdi_metrics.driver.pmp_parser import PMPParser +from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPParser try: import cmor From 829fc9e14a4af22a9a75bc27cc537e68f2bfff19 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 26 Oct 2022 21:45:40 -0700 Subject: [PATCH 027/130] clean up --- pcmdi_metrics/version.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index fe3d9624b..84f52a493 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-26-gacc86e2' -__git_sha1__ = 'acc86e23aa5caf8bf38e3a170b8eef3785edf794' +__git_tag_describe__ = 'v2.3.1-33-gbf91c8b' +__git_sha1__ = 'bf91c8b2bfc112dd528f7a3110b9b87b2cd26c29' From ca9d6247de6ea8abd8a95b268b30850c8aa0ef6d Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 26 Oct 2022 22:30:28 -0700 Subject: [PATCH 028/130] clean up, chmod --- pcmdi_metrics/mean_climate/lib/__init__.py | 40 +++++++++++-------- .../mean_climate/mean_climate_driver.py | 0 .../pcmdi_compute_climatologies.py | 0 pcmdi_metrics/version.py | 4 +- setup.py | 2 +- 5 files changed, 27 insertions(+), 19 deletions(-) mode change 100644 => 100755 pcmdi_metrics/mean_climate/mean_climate_driver.py mode change 100644 => 100755 pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py diff --git a/pcmdi_metrics/mean_climate/lib/__init__.py b/pcmdi_metrics/mean_climate/lib/__init__.py index 1fc5a97e3..2ae85617d 100644 --- a/pcmdi_metrics/mean_climate/lib/__init__.py +++ b/pcmdi_metrics/mean_climate/lib/__init__.py @@ -1,18 +1,26 @@ -from . import annual_mean # noqa -from . import bias_xy # noqa -from . import cor_xy # noqa -from . import cor_xyt # noqa -from . import io # noqa -from . import mean_xy # noqa -from . import meanabs_xy # noqa -from . import pmp_parser # noqa -from . 
import rms_0 # noqa -from . import rms_xy # noqa -from . import rms_xyt # noqa -from . import rmsc_xy # noqa -from . import seasonal_mean # noqa -from . import std_xy # noqa -from . import std_xyt # noqa -from . import zonal_mean # noqa +from . import ( # noqa + annual_mean, + bias_xy, + cor_xy, + cor_xyt, + io, + mean_xy, + meanabs_xy, + pmp_parser, + rms_0, + rms_xy, + rms_xyt, + rmsc_xy, + seasonal_mean, + std_xy, + std_xyt, + zonal_mean, + pmp_parameter, + outputmetrics, + observation, + model, + dataset, +) from .mean_climate_metrics_calculations import compute_metrics # noqa from .mean_climate_metrics_driver import PMPDriver, create_mean_climate_parser # noqa + diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py old mode 100644 new mode 100755 diff --git a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py old mode 100644 new mode 100755 diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 25f5604d3..e79a1dda8 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-33-gbf91c8b2' -__git_sha1__ = 'bf91c8b2bfc112dd528f7a3110b9b87b2cd26c29' +__git_tag_describe__ = 'v2.3.1-37-gff043f6' +__git_sha1__ = 'ff043f64fdaf0347bd276d11926735ff3b3aaa3e' diff --git a/setup.py b/setup.py index cae53b1ba..c7f891f40 100755 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ from setuptools import find_packages, setup -Version = "2.5" +Version = "2.5.0" p = subprocess.Popen( ("git", "describe", "--tags"), stdin=subprocess.PIPE, From dc67c92d9cfa204c33b38e455ef9ca9fe20628c6 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 26 Oct 2022 22:44:20 -0700 Subject: [PATCH 029/130] clean up --- pcmdi_metrics/__init__.py | 3 ++- pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py | 1 + pcmdi_metrics/version.py | 4 ++-- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/pcmdi_metrics/__init__.py b/pcmdi_metrics/__init__.py index 92933f209..fde228f02 100644 --- a/pcmdi_metrics/__init__.py +++ b/pcmdi_metrics/__init__.py @@ -15,5 +15,6 @@ plog.addHandler(ch) plog.setLevel(LOG_LEVEL) from . import io # noqa -from . import pcmdi # noqa +#from . import pcmdi # noqa +from . 
import mean_climate # noqa from .version import __git_sha1__, __git_tag_describe__, __version__ # noqa diff --git a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py index e16444444..1edb191a2 100755 --- a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py +++ b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py @@ -1,4 +1,5 @@ #!/usr/bin/env python + import datetime import os import dask diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index e79a1dda8..4afd23a0c 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-37-gff043f6' -__git_sha1__ = 'ff043f64fdaf0347bd276d11926735ff3b3aaa3e' +__git_tag_describe__ = 'v2.3.1-38-gca9d624' +__git_sha1__ = 'ca9d6247de6ea8abd8a95b268b30850c8aa0ef6d' From 47ccc2071634f3a2a8a6725503caa9ea6c3ef9b4 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 26 Oct 2022 23:34:52 -0700 Subject: [PATCH 030/130] clean up --- pcmdi_metrics/version.py | 4 ++-- setup.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 4afd23a0c..81674133f 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-38-gca9d624' -__git_sha1__ = 'ca9d6247de6ea8abd8a95b268b30850c8aa0ef6d' +__git_tag_describe__ = 'v2.3.1-39-gdc67c92' +__git_sha1__ = 'dc67c92d9cfa204c33b38e455ef9ca9fe20628c6' diff --git a/setup.py b/setup.py index c7f891f40..412814dc0 100755 --- a/setup.py +++ b/setup.py @@ -42,7 +42,8 @@ p = subprocess.Popen(["python", "setup_default_args.py"], cwd="share") p.communicate() -packages = find_packages() +packages = find_packages(exclude=["cmec", "tests"]) + scripts = [ "pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py", "pcmdi_metrics/mean_climate/mean_climate_driver.py", From 89eb5916b3b68036a359a38fc714de98a915c6ae Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Wed, 26 Oct 2022 23:35:23 -0700 Subject: [PATCH 031/130] clean up --- pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py index 1edb191a2..2083aa6d7 100755 --- a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py +++ b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py @@ -104,7 +104,7 @@ def clim_calc(var, infile, outfile=None, outpath=None, outfilename=None, start=N ver = datetime.datetime.now().strftime("v%Y%m%d") - P = pcmdi_metrics.mean_climate.lib.pmp_parser.PMPMetricsParser() + P = pcmdi_metrics.mean_climate.pmp_parser.PMPMetricsParser() P.add_argument( "--vars", dest="vars", help="List of variables", nargs="+", required=False From 10b0aeaf676df0b66258c208b4298145f92e4f08 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Thu, 27 Oct 2022 09:22:46 -0700 Subject: [PATCH 032/130] gather statistics compute functions in one file --- pcmdi_metrics/mean_climate/lib/__init__.py | 16 +- pcmdi_metrics/mean_climate/lib/annual_mean.py | 18 -- pcmdi_metrics/mean_climate/lib/bias_xy.py | 18 -- pcmdi_metrics/mean_climate/lib/bias_xyt.py | 18 -- .../mean_climate/lib/compute_statistics.py | 238 ++++++++++++++++++ pcmdi_metrics/mean_climate/lib/cor_xy.py | 14 -- pcmdi_metrics/mean_climate/lib/cor_xyt.py | 14 -- pcmdi_metrics/mean_climate/lib/mean_xy.py | 17 -- 
pcmdi_metrics/mean_climate/lib/meanabs_xy.py | 19 -- pcmdi_metrics/mean_climate/lib/meanabs_xyt.py | 19 -- pcmdi_metrics/mean_climate/lib/rms_0.py | 17 -- pcmdi_metrics/mean_climate/lib/rms_xy.py | 14 -- pcmdi_metrics/mean_climate/lib/rms_xyt.py | 14 -- pcmdi_metrics/mean_climate/lib/rmsc_xy.py | 16 -- .../mean_climate/lib/seasonal_mean.py | 30 --- pcmdi_metrics/mean_climate/lib/std_xy.py | 14 -- pcmdi_metrics/mean_climate/lib/std_xyt.py | 14 -- pcmdi_metrics/mean_climate/lib/zonal_mean.py | 15 -- pcmdi_metrics/version.py | 4 +- 19 files changed, 249 insertions(+), 280 deletions(-) delete mode 100644 pcmdi_metrics/mean_climate/lib/annual_mean.py delete mode 100644 pcmdi_metrics/mean_climate/lib/bias_xy.py delete mode 100644 pcmdi_metrics/mean_climate/lib/bias_xyt.py create mode 100644 pcmdi_metrics/mean_climate/lib/compute_statistics.py delete mode 100644 pcmdi_metrics/mean_climate/lib/cor_xy.py delete mode 100644 pcmdi_metrics/mean_climate/lib/cor_xyt.py delete mode 100644 pcmdi_metrics/mean_climate/lib/mean_xy.py delete mode 100644 pcmdi_metrics/mean_climate/lib/meanabs_xy.py delete mode 100644 pcmdi_metrics/mean_climate/lib/meanabs_xyt.py delete mode 100644 pcmdi_metrics/mean_climate/lib/rms_0.py delete mode 100644 pcmdi_metrics/mean_climate/lib/rms_xy.py delete mode 100644 pcmdi_metrics/mean_climate/lib/rms_xyt.py delete mode 100644 pcmdi_metrics/mean_climate/lib/rmsc_xy.py delete mode 100644 pcmdi_metrics/mean_climate/lib/seasonal_mean.py delete mode 100644 pcmdi_metrics/mean_climate/lib/std_xy.py delete mode 100644 pcmdi_metrics/mean_climate/lib/std_xyt.py delete mode 100644 pcmdi_metrics/mean_climate/lib/zonal_mean.py diff --git a/pcmdi_metrics/mean_climate/lib/__init__.py b/pcmdi_metrics/mean_climate/lib/__init__.py index 2ae85617d..af30ffdde 100644 --- a/pcmdi_metrics/mean_climate/lib/__init__.py +++ b/pcmdi_metrics/mean_climate/lib/__init__.py @@ -1,12 +1,10 @@ -from . import ( # noqa +from .compute_statistics import ( # noqa annual_mean, bias_xy, cor_xy, cor_xyt, - io, mean_xy, meanabs_xy, - pmp_parser, rms_0, rms_xy, rms_xyt, @@ -15,12 +13,16 @@ std_xy, std_xyt, zonal_mean, +) +from .mean_climate_metrics_calculations import compute_metrics # noqa +from .mean_climate_metrics_driver import PMPDriver, create_mean_climate_parser # noqa + +from . import( # noqa + io, + pmp_parser, pmp_parameter, outputmetrics, observation, model, dataset, -) -from .mean_climate_metrics_calculations import compute_metrics # noqa -from .mean_climate_metrics_driver import PMPDriver, create_mean_climate_parser # noqa - +) \ No newline at end of file diff --git a/pcmdi_metrics/mean_climate/lib/annual_mean.py b/pcmdi_metrics/mean_climate/lib/annual_mean.py deleted file mode 100644 index c9fb5d67a..000000000 --- a/pcmdi_metrics/mean_climate/lib/annual_mean.py +++ /dev/null @@ -1,18 +0,0 @@ -import cdms2 -import cdutil - - -def compute(dm, do): - """Computes ANNUAL MEAN""" - if dm is None and do is None: # just want the doc - return { - "Name": "Annual Mean", - "Abstract": "Compute Annual Mean", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - "Comments": "Assumes input are 12 months climatology", - } - # Do we really want this? 
Wouldn't it better to let it fails - cdms2.setAutoBounds("on") - return cdutil.averager(dm, axis="t"), cdutil.averager(do, axis="t") diff --git a/pcmdi_metrics/mean_climate/lib/bias_xy.py b/pcmdi_metrics/mean_climate/lib/bias_xy.py deleted file mode 100644 index b9d0e6bdb..000000000 --- a/pcmdi_metrics/mean_climate/lib/bias_xy.py +++ /dev/null @@ -1,18 +0,0 @@ -import cdutil -import MV2 - - -def compute(dm, do): - """Computes bias""" - if dm is None and do is None: # just want the doc - return { - "Name": "Bias", - "Abstract": "Compute Full Average of Model - Observation", - "Contact": "pcmdi-metrics@llnl.gov", - } - dif = MV2.subtract(dm, do) - return MV2.float(cdutil.averager(dif, axis="xy", weights="weighted")) - - -# return MV2.float(MV2.average(MV2.subtract(dm, do))) deprecated - does -# not use area weights diff --git a/pcmdi_metrics/mean_climate/lib/bias_xyt.py b/pcmdi_metrics/mean_climate/lib/bias_xyt.py deleted file mode 100644 index 654fa5890..000000000 --- a/pcmdi_metrics/mean_climate/lib/bias_xyt.py +++ /dev/null @@ -1,18 +0,0 @@ -import cdutil -import MV2 - - -def compute(dm, do): - """Computes bias""" - if dm is None and do is None: # just want the doc - return { - "Name": "Bias", - "Abstract": "Compute Full Average of Model - Observation", - "Contact": "pcmdi-metrics@llnl.gov", - } - dif = MV2.subtract(dm, do) - return MV2.float(cdutil.averager(dif, axis="xyt", weights="weighted")) - - -# return MV2.float(MV2.average(MV2.subtract(dm, do))) deprecated - does -# not use area weights diff --git a/pcmdi_metrics/mean_climate/lib/compute_statistics.py b/pcmdi_metrics/mean_climate/lib/compute_statistics.py new file mode 100644 index 000000000..be8e957f3 --- /dev/null +++ b/pcmdi_metrics/mean_climate/lib/compute_statistics.py @@ -0,0 +1,238 @@ +import cdms2 +import cdutil +import genutil +import MV2 + + +def annual_mean(dm, do): + """Computes ANNUAL MEAN""" + if dm is None and do is None: # just want the doc + return { + "Name": "Annual Mean", + "Abstract": "Compute Annual Mean", + "URI": "http://uvcdat.llnl.gov/documentation/" + + "utilities/utilities-2.html", + "Contact": "pcmdi-metrics@llnl.gov", + "Comments": "Assumes input are 12 months climatology", + } + # Do we really want this? 
Wouldn't it better to let it fails + cdms2.setAutoBounds("on") + return cdutil.averager(dm, axis="t"), cdutil.averager(do, axis="t") + + +def bias_xy(dm, do): + """Computes bias""" + if dm is None and do is None: # just want the doc + return { + "Name": "Bias", + "Abstract": "Compute Full Average of Model - Observation", + "Contact": "pcmdi-metrics@llnl.gov", + } + dif = MV2.subtract(dm, do) + return MV2.float(cdutil.averager(dif, axis="xy", weights="weighted")) + + +def bias_xyt(dm, do): + """Computes bias""" + if dm is None and do is None: # just want the doc + return { + "Name": "Bias", + "Abstract": "Compute Full Average of Model - Observation", + "Contact": "pcmdi-metrics@llnl.gov", + } + dif = MV2.subtract(dm, do) + return MV2.float(cdutil.averager(dif, axis="xyt", weights="weighted")) + + +def cor_xy(dm, do): + """Computes correlation""" + if dm is None and do is None: # just want the doc + return { + "Name": "Spatial Correlation", + "Abstract": "Compute Spatial Correlation", + "URI": "http://uvcdat.llnl.gov/documentation/utilities/" + + "utilities-2.html", + "Contact": "pcmdi-metrics@llnl.gov", + } + return float(genutil.statistics.correlation(dm, do, axis="xy", weights="weighted")) + + +def cor_xyt(dm, do): + """Computes correlation""" + if dm is None and do is None: # just want the doc + return { + "Name": "Spatial and Temporal Correlation", + "Abstract": "Compute Spatio-Temporal Correlation", + "URI": "http://uvcdat.llnl.gov/documentation/utilities/" + + "utilities-2.html", + "Contact": "pcmdi-metrics@llnl.gov", + } + return float(genutil.statistics.correlation(dm, do, axis="xyt", weights="weighted")) + + +def mean_xy(d): + """Computes bias""" + if d is None: # just want the doc + return { + "Name": "Mean", + "Abstract": "Area Mean (area weighted)", + "Contact": "pcmdi-metrics@llnl.gov", + } + return MV2.float(cdutil.averager(d, axis="xy", weights="weighted")) + + +def meanabs_xy(dm, do): + """Computes Mean Absolute Error""" + if dm is None and do is None: # just want the doc + return { + "Name": "Mean Absolute Error", + "Abstract": "Compute Full Average of " + + "Absolute Difference Between Model And Observation", + "Contact": "pcmdi-metrics@llnl.gov", + } + absdif = MV2.absolute(MV2.subtract(dm, do)) + mae = cdutil.averager(absdif, axis="xy", weights="weighted") + return float(mae) + + +def meanabs_xyt(dm, do): + """Computes Mean Absolute Error""" + if dm is None and do is None: # just want the doc + return { + "Name": "Mean Absolute Error", + "Abstract": "Compute Full Average of " + + "Absolute Difference Between Model And Observation", + "Contact": "pcmdi-metrics@llnl.gov", + } + absdif = MV2.absolute(MV2.subtract(dm, do)) + mae = cdutil.averager(absdif, axis="xyt", weights="weighted") + return float(mae) + + +def rms_0(dm, do): + """Computes rms over first axis""" + if dm is None and do is None: # just want the doc + return { + "Name": "Root Mean Square over First Axis", + "Abstract": "Compute Root Mean Square over the first axis", + "URI": "http://uvcdat.llnl.gov/documentation/" + + "utilities/utilities-2.html", + "Contact": "pcmdi-metrics@llnl.gov", + } + if 1 in [x.isLevel() for x in dm.getAxisList()]: + dm = dm(squeeze=1) + do = do(squeeze=1) + return float(genutil.statistics.rms(dm, do)) + + +def rms_xy(dm, do): + """Computes rms""" + if dm is None and do is None: # just want the doc + return { + "Name": "Spatial Root Mean Square", + "Abstract": "Compute Spatial Root Mean Square", + "URI": "http://uvcdat.llnl.gov/documentation/" + + "utilities/utilities-2.html", + 
"Contact": "pcmdi-metrics@llnl.gov", + } + return float(genutil.statistics.rms(dm, do, axis="xy", weights="weighted")) + + +def rms_xyt(dm, do): + """Computes rms""" + if dm is None and do is None: # just want the doc + return { + "Name": "Spatio-Temporal Root Mean Square", + "Abstract": "Compute Spatial and Temporal Root Mean Square", + "URI": "http://uvcdat.llnl.gov/documentation/" + + "utilities/utilities-2.html", + "Contact": "pcmdi-metrics@llnl.gov", + } + return float(genutil.statistics.rms(dm, do, axis="xyt", weights="weighted")) + + +def rmsc_xy(dm, do): + """Computes centered rms""" + if dm is None and do is None: # just want the doc + return { + "Name": "Spatial Root Mean Square", + "Abstract": "Compute Centered Spatial Root Mean Square", + "URI": "http://uvcdat.llnl.gov/documentation/" + + "utilities/utilities-2.html", + "Contact": "pcmdi-metrics@llnl.gov", + } + return float( + genutil.statistics.rms(dm, do, axis="xy", centered=1, weights="weighted") + ) + + +def seasonal_mean(d, sea): + """Computes SEASONAL MEAN""" + if d is None and sea is None: # just want the doc + return { + "Name": "Seasonal Mean", + "Abstract": "Compute Seasonal Mean", + "Contact": "pcmdi-metrics@llnl.gov", + "Comments": "Assumes input are 12 months climatology", + } + + mo_wts = [31, 31, 28.25, 31, 30, 31, 30, 31, 31, 30, 31, 30] + + if sea == "djf": + indx = [11, 0, 1] + if sea == "mam": + indx = [2, 3, 4] + if sea == "jja": + indx = [5, 6, 7] + if sea == "son": + indx = [8, 9, 10] + + sea_no_days = mo_wts[indx[0]] + mo_wts[indx[1]] + mo_wts[indx[2]] + + d_sea = ( + d[indx[0]] * mo_wts[indx[0]] + + d[indx[1]] * mo_wts[indx[1]] + + d[indx[2]] * mo_wts[indx[2]] + ) / sea_no_days + + return d_sea + + +def std_xy(d): + """Computes std""" + if d is None: # just want the doc + return { + "Name": "Spatial Standard Deviation", + "Abstract": "Compute Spatial Standard Deviation", + "URI": "http://uvcdat.llnl.gov/documentation/" + + "utilities/utilities-2.html", + "Contact": "pcmdi-metrics@llnl.gov", + } + return float(genutil.statistics.std(d, axis="xy", weights="weighted")) + + +def std_xyt(d): + """Computes std""" + if d is None: # just want the doc + return { + "Name": "Spatial-temporal Standard Deviation", + "Abstract": "Compute Space-Time Standard Deviation", + "URI": "http://uvcdat.llnl.gov/documentation/" + + "utilities/utilities-2.html", + "Contact": "pcmdi-metrics@llnl.gov", + } + return float(genutil.statistics.std(d, axis="xyt", weights="weighted")) + + +def zonal_mean(dm, do): + """Computes ZONAL MEAN assumes rectilinear/regular grid""" + if dm is None and do is None: # just want the doc + return { + "Name": "Zonal Mean", + "Abstract": "Compute Zonal Mean", + "URI": "http://uvcdat.llnl.gov/documentation/" + + "utilities/utilities-2.html", + "Contact": "pcmdi-metrics@llnl.gov", + "Comments": "", + } + return cdutil.averager(dm, axis="x"), cdutil.averager(do, axis="x") \ No newline at end of file diff --git a/pcmdi_metrics/mean_climate/lib/cor_xy.py b/pcmdi_metrics/mean_climate/lib/cor_xy.py deleted file mode 100644 index 985b6c03e..000000000 --- a/pcmdi_metrics/mean_climate/lib/cor_xy.py +++ /dev/null @@ -1,14 +0,0 @@ -import genutil - - -def compute(dm, do): - """Computes correlation""" - if dm is None and do is None: # just want the doc - return { - "Name": "Spatial Correlation", - "Abstract": "Compute Spatial Correlation", - "URI": "http://uvcdat.llnl.gov/documentation/utilities/" - + "utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - return float(genutil.statistics.correlation(dm, 
do, axis="xy", weights="weighted")) diff --git a/pcmdi_metrics/mean_climate/lib/cor_xyt.py b/pcmdi_metrics/mean_climate/lib/cor_xyt.py deleted file mode 100644 index a345712d3..000000000 --- a/pcmdi_metrics/mean_climate/lib/cor_xyt.py +++ /dev/null @@ -1,14 +0,0 @@ -import genutil - - -def compute(dm, do): - """Computes correlation""" - if dm is None and do is None: # just want the doc - return { - "Name": "Spatial and Temporal Correlation", - "Abstract": "Compute Spatio-Temporal Correlation", - "URI": "http://uvcdat.llnl.gov/documentation/utilities/" - + "utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - return float(genutil.statistics.correlation(dm, do, axis="xyt", weights="weighted")) diff --git a/pcmdi_metrics/mean_climate/lib/mean_xy.py b/pcmdi_metrics/mean_climate/lib/mean_xy.py deleted file mode 100644 index eb6779c19..000000000 --- a/pcmdi_metrics/mean_climate/lib/mean_xy.py +++ /dev/null @@ -1,17 +0,0 @@ -import cdutil -import MV2 - - -def compute(d): - """Computes bias""" - if d is None: # just want the doc - return { - "Name": "Mean", - "Abstract": "Area Mean (area weighted)", - "Contact": "pcmdi-metrics@llnl.gov", - } - return MV2.float(cdutil.averager(d, axis="xy", weights="weighted")) - - -# return MV2.float(MV2.average(MV2.subtract(dm, do))) deprecated - does -# not use area weights diff --git a/pcmdi_metrics/mean_climate/lib/meanabs_xy.py b/pcmdi_metrics/mean_climate/lib/meanabs_xy.py deleted file mode 100644 index 58da3a31f..000000000 --- a/pcmdi_metrics/mean_climate/lib/meanabs_xy.py +++ /dev/null @@ -1,19 +0,0 @@ -import cdutil -import MV2 - - -def compute(dm, do): - """Computes Mean Absolute Error""" - if dm is None and do is None: # just want the doc - return { - "Name": "Mean Absolute Error", - "Abstract": "Compute Full Average of " - + "Absolute Difference Between Model And Observation", - "Contact": "pcmdi-metrics@llnl.gov", - } - absdif = MV2.absolute(MV2.subtract(dm, do)) - mae = cdutil.averager(absdif, axis="xy", weights="weighted") - - # mae = MV.average(MV.absolute(MV.subtract(dm, do))) - depricated ... did - # not include area weights - return float(mae) diff --git a/pcmdi_metrics/mean_climate/lib/meanabs_xyt.py b/pcmdi_metrics/mean_climate/lib/meanabs_xyt.py deleted file mode 100644 index 5fc8e566f..000000000 --- a/pcmdi_metrics/mean_climate/lib/meanabs_xyt.py +++ /dev/null @@ -1,19 +0,0 @@ -import cdutil -import MV2 - - -def compute(dm, do): - """Computes Mean Absolute Error""" - if dm is None and do is None: # just want the doc - return { - "Name": "Mean Absolute Error", - "Abstract": "Compute Full Average of " - + "Absolute Difference Between Model And Observation", - "Contact": "pcmdi-metrics@llnl.gov", - } - absdif = MV2.absolute(MV2.subtract(dm, do)) - mae = cdutil.averager(absdif, axis="xyt", weights="weighted") - - # mae = MV.average(MV.absolute(MV.subtract(dm, do))) - depricated ... 
did - # not include area weights - return float(mae) diff --git a/pcmdi_metrics/mean_climate/lib/rms_0.py b/pcmdi_metrics/mean_climate/lib/rms_0.py deleted file mode 100644 index f0db1284b..000000000 --- a/pcmdi_metrics/mean_climate/lib/rms_0.py +++ /dev/null @@ -1,17 +0,0 @@ -import genutil - - -def compute(dm, do): - """Computes rms over first axis""" - if dm is None and do is None: # just want the doc - return { - "Name": "Root Mean Square over First Axis", - "Abstract": "Compute Root Mean Square over the first axis", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - if 1 in [x.isLevel() for x in dm.getAxisList()]: - dm = dm(squeeze=1) - do = do(squeeze=1) - return float(genutil.statistics.rms(dm, do)) diff --git a/pcmdi_metrics/mean_climate/lib/rms_xy.py b/pcmdi_metrics/mean_climate/lib/rms_xy.py deleted file mode 100644 index 06c477524..000000000 --- a/pcmdi_metrics/mean_climate/lib/rms_xy.py +++ /dev/null @@ -1,14 +0,0 @@ -import genutil - - -def compute(dm, do): - """Computes rms""" - if dm is None and do is None: # just want the doc - return { - "Name": "Spatial Root Mean Square", - "Abstract": "Compute Spatial Root Mean Square", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - return float(genutil.statistics.rms(dm, do, axis="xy", weights="weighted")) diff --git a/pcmdi_metrics/mean_climate/lib/rms_xyt.py b/pcmdi_metrics/mean_climate/lib/rms_xyt.py deleted file mode 100644 index 7d0eae0b8..000000000 --- a/pcmdi_metrics/mean_climate/lib/rms_xyt.py +++ /dev/null @@ -1,14 +0,0 @@ -import genutil - - -def compute(dm, do): - """Computes rms""" - if dm is None and do is None: # just want the doc - return { - "Name": "Spatio-Temporal Root Mean Square", - "Abstract": "Compute Spatial and Temporal Root Mean Square", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - return float(genutil.statistics.rms(dm, do, axis="xyt", weights="weighted")) diff --git a/pcmdi_metrics/mean_climate/lib/rmsc_xy.py b/pcmdi_metrics/mean_climate/lib/rmsc_xy.py deleted file mode 100644 index 125e57de2..000000000 --- a/pcmdi_metrics/mean_climate/lib/rmsc_xy.py +++ /dev/null @@ -1,16 +0,0 @@ -import genutil - - -def compute(dm, do): - """Computes centered rms""" - if dm is None and do is None: # just want the doc - return { - "Name": "Spatial Root Mean Square", - "Abstract": "Compute Centered Spatial Root Mean Square", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - return float( - genutil.statistics.rms(dm, do, axis="xy", centered=1, weights="weighted") - ) diff --git a/pcmdi_metrics/mean_climate/lib/seasonal_mean.py b/pcmdi_metrics/mean_climate/lib/seasonal_mean.py deleted file mode 100644 index 2220ae64e..000000000 --- a/pcmdi_metrics/mean_climate/lib/seasonal_mean.py +++ /dev/null @@ -1,30 +0,0 @@ -def compute(d, sea): - """Computes SEASONAL MEAN""" - if d is None and sea is None: # just want the doc - return { - "Name": "Seasonal Mean", - "Abstract": "Compute Seasonal Mean", - "Contact": "pcmdi-metrics@llnl.gov", - "Comments": "Assumes input are 12 months climatology", - } - - mo_wts = [31, 31, 28.25, 31, 30, 31, 30, 31, 31, 30, 31, 30] - - if sea == "djf": - indx = [11, 0, 1] - if sea == "mam": - indx = [2, 3, 4] - if sea == "jja": - indx = [5, 6, 7] - if sea == "son": - indx = [8, 9, 10] - - 
sea_no_days = mo_wts[indx[0]] + mo_wts[indx[1]] + mo_wts[indx[2]] - - d_sea = ( - d[indx[0]] * mo_wts[indx[0]] - + d[indx[1]] * mo_wts[indx[1]] - + d[indx[2]] * mo_wts[indx[2]] - ) / sea_no_days - - return d_sea diff --git a/pcmdi_metrics/mean_climate/lib/std_xy.py b/pcmdi_metrics/mean_climate/lib/std_xy.py deleted file mode 100644 index ada831346..000000000 --- a/pcmdi_metrics/mean_climate/lib/std_xy.py +++ /dev/null @@ -1,14 +0,0 @@ -import genutil - - -def compute(d): - """Computes std""" - if d is None: # just want the doc - return { - "Name": "Spatial Standard Deviation", - "Abstract": "Compute Spatial Standard Deviation", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - return float(genutil.statistics.std(d, axis="xy", weights="weighted")) diff --git a/pcmdi_metrics/mean_climate/lib/std_xyt.py b/pcmdi_metrics/mean_climate/lib/std_xyt.py deleted file mode 100644 index 1bb04a0b9..000000000 --- a/pcmdi_metrics/mean_climate/lib/std_xyt.py +++ /dev/null @@ -1,14 +0,0 @@ -import genutil - - -def compute(d): - """Computes std""" - if d is None: # just want the doc - return { - "Name": "Spatial-temporal Standard Deviation", - "Abstract": "Compute Space-Time Standard Deviation", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - return float(genutil.statistics.std(d, axis="xyt", weights="weighted")) diff --git a/pcmdi_metrics/mean_climate/lib/zonal_mean.py b/pcmdi_metrics/mean_climate/lib/zonal_mean.py deleted file mode 100644 index 3084c5a04..000000000 --- a/pcmdi_metrics/mean_climate/lib/zonal_mean.py +++ /dev/null @@ -1,15 +0,0 @@ -import cdutil - - -def compute(dm, do): - """Computes ZONAL MEAN assumes rectilinear/regular grid""" - if dm is None and do is None: # just want the doc - return { - "Name": "Zonal Mean", - "Abstract": "Compute Zonal Mean", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - "Comments": "", - } - return cdutil.averager(dm, axis="x"), cdutil.averager(do, axis="x") diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 81674133f..1360ce1db 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-39-gdc67c92' -__git_sha1__ = 'dc67c92d9cfa204c33b38e455ef9ca9fe20628c6' +__git_tag_describe__ = 'v2.3.1-41-g89eb5916' +__git_sha1__ = '89eb5916b3b68036a359a38fc714de98a915c6ae' From b32976bb02516abe96928f34c195a8b72f2fc078 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Thu, 27 Oct 2022 11:25:41 -0700 Subject: [PATCH 033/130] clean up --- pcmdi_metrics/__init__.py | 2 +- pcmdi_metrics/mean_climate/lib/__init__.py | 18 ++++++++--------- .../lib/mean_climate_metrics_driver.py | 20 ++++++++++--------- pcmdi_metrics/mean_climate/lib/model.py | 5 +++-- pcmdi_metrics/mean_climate/lib/observation.py | 2 +- .../mean_climate/lib/outputmetrics.py | 4 ++-- pcmdi_metrics/mean_climate/lib/pmp_parser.py | 9 ++++++--- pcmdi_metrics/version.py | 4 ++-- 8 files changed, 34 insertions(+), 30 deletions(-) diff --git a/pcmdi_metrics/__init__.py b/pcmdi_metrics/__init__.py index fde228f02..c9533b8d3 100644 --- a/pcmdi_metrics/__init__.py +++ b/pcmdi_metrics/__init__.py @@ -16,5 +16,5 @@ plog.setLevel(LOG_LEVEL) from . import io # noqa #from . import pcmdi # noqa -from . import mean_climate # noqa +#from . 
import mean_climate # noqa from .version import __git_sha1__, __git_tag_describe__, __version__ # noqa diff --git a/pcmdi_metrics/mean_climate/lib/__init__.py b/pcmdi_metrics/mean_climate/lib/__init__.py index af30ffdde..ccb589afa 100644 --- a/pcmdi_metrics/mean_climate/lib/__init__.py +++ b/pcmdi_metrics/mean_climate/lib/__init__.py @@ -12,17 +12,15 @@ seasonal_mean, std_xy, std_xyt, - zonal_mean, + zonal_mean ) from .mean_climate_metrics_calculations import compute_metrics # noqa from .mean_climate_metrics_driver import PMPDriver, create_mean_climate_parser # noqa -from . import( # noqa - io, - pmp_parser, - pmp_parameter, - outputmetrics, - observation, - model, - dataset, -) \ No newline at end of file +from .io import OBS, JSONs # noqa +from .pmp_parser import PMPParser, PMPMetricsParser # noqa +from .pmp_parameter import PMPParameter, PMPMetricsParameter # noqa +from .outputmetrics import OutputMetrics # noqa +from .observation import OBS, Observation # noqa +from .model import Model # noqa +from .dataset import DataSet # noqa diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py index 6ac52fd39..34f5f1a1c 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py @@ -3,12 +3,14 @@ import json import logging -import pcmdi_metrics.mean_climate.lib.dataset -import pcmdi_metrics.mean_climate.lib.pmp_parser +#import pcmdi_metrics.mean_climate.lib.dataset +from pcmdi_metrics.mean_climate.lib import DataSet +#import pcmdi_metrics.mean_climate.lib.pmp_parser +from pcmdi_metrics.mean_climate.lib import pmp_parser from pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.mean_climate.lib.model import Model -from pcmdi_metrics.mean_climate.lib.observation import Observation -from pcmdi_metrics.mean_climate.lib.outputmetrics import OutputMetrics +from pcmdi_metrics.mean_climate.lib import Model +from pcmdi_metrics.mean_climate.lib import Observation +from pcmdi_metrics.mean_climate.lib import OutputMetrics class PMPDriver(object): @@ -37,7 +39,7 @@ def __init__(self, parameter): self.var = "" self.output_metric = None self.region = "" - self.sftlf = pcmdi_metrics.mean_climate.lib.dataset.DataSet.create_sftlf(self.parameter) + self.sftlf = DataSet.create_sftlf(self.parameter) self.default_regions = [] self.regions_specs = {} @@ -67,7 +69,7 @@ def load_obs_dict(self): """Loads obs_info_dictionary.json and appends custom_observations from the parameter file if needed.""" obs_file_name = "obs_info_dictionary.json" - obs_json_file = pcmdi_metrics.mean_climate.lib.dataset.DataSet.load_path_as_file_obj( + obs_json_file = DataSet.load_path_as_file_obj( obs_file_name ) obs_dict = json.loads(obs_json_file.read()) @@ -103,7 +105,7 @@ def load_default_regions_and_regions_specs(self): """Gets the default_regions dict and regions_specs dict from default_regions.py and stores them as attributes.""" default_regions_file = ( - pcmdi_metrics.mean_climate.lib.dataset.DataSet.load_path_as_file_obj( + DataSet.load_path_as_file_obj( "default_regions.py" ) ) @@ -269,7 +271,7 @@ def determine_obs_or_model(self, is_obs, ref_or_test, data_path): def create_mean_climate_parser(): - parser = pcmdi_metrics.mean_climate.lib.pmp_parser.PMPMetricsParser() + parser = pmp_parser.PMPMetricsParser() parser.add_argument( "--case_id", dest="case_id", diff --git a/pcmdi_metrics/mean_climate/lib/model.py b/pcmdi_metrics/mean_climate/lib/model.py index 
2588fd479..127c1eb5a 100644 --- a/pcmdi_metrics/mean_climate/lib/model.py +++ b/pcmdi_metrics/mean_climate/lib/model.py @@ -5,12 +5,13 @@ import cdutil import MV2 -import pcmdi_metrics.mean_climate.lib.dataset +from pcmdi_metrics.mean_climate.lib import DataSet from pcmdi_metrics import LOG_LEVEL from pcmdi_metrics.io.base import Base -class Model(pcmdi_metrics.mean_climate.lib.dataset.DataSet): +#class Model(pcmdi_metrics.mean_climate.lib.dataset.DataSet): +class Model(DataSet): """Handles all the computation (setting masking, target grid, etc) and some file I/O related to models.""" diff --git a/pcmdi_metrics/mean_climate/lib/observation.py b/pcmdi_metrics/mean_climate/lib/observation.py index 001889641..1d17c7de7 100644 --- a/pcmdi_metrics/mean_climate/lib/observation.py +++ b/pcmdi_metrics/mean_climate/lib/observation.py @@ -3,7 +3,7 @@ import MV2 from pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.mean_climate.lib.dataset import DataSet +from pcmdi_metrics.mean_climate.lib import DataSet from pcmdi_metrics.io.base import Base try: diff --git a/pcmdi_metrics/mean_climate/lib/outputmetrics.py b/pcmdi_metrics/mean_climate/lib/outputmetrics.py index 97718ff39..ce44484b4 100644 --- a/pcmdi_metrics/mean_climate/lib/outputmetrics.py +++ b/pcmdi_metrics/mean_climate/lib/outputmetrics.py @@ -6,8 +6,8 @@ import pcmdi_metrics from pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.mean_climate.lib.dataset import DataSet -from pcmdi_metrics.mean_climate.lib.observation import Observation +from pcmdi_metrics.mean_climate.lib import DataSet +from pcmdi_metrics.mean_climate.lib import Observation from pcmdi_metrics.io.base import Base try: diff --git a/pcmdi_metrics/mean_climate/lib/pmp_parser.py b/pcmdi_metrics/mean_climate/lib/pmp_parser.py index 249496e75..dc9243264 100644 --- a/pcmdi_metrics/mean_climate/lib/pmp_parser.py +++ b/pcmdi_metrics/mean_climate/lib/pmp_parser.py @@ -2,7 +2,8 @@ import cdp.cdp_parser -import pcmdi_metrics.mean_climate.lib.pmp_parameter +#import pcmdi_metrics.mean_climate.lib.pmp_parameter +from pcmdi_metrics.mean_climate.lib import pmp_parameter from pcmdi_metrics import resources try: @@ -21,7 +22,8 @@ def path_to_default_args(): class PMPParser(cdp.cdp_parser.CDPParser): def __init__(self, *args, **kwargs): super(PMPParser, self).__init__( - pcmdi_metrics.mean_climate.lib.pmp_parameter.PMPParameter, + #pcmdi_metrics.mean_climate.lib.pmp_parameter.PMPParameter, + pmp_parameter.PMPParameter, path_to_default_args(), *args, **kwargs, @@ -33,7 +35,8 @@ def __init__(self, *args, **kwargs): class PMPMetricsParser(cdp.cdp_parser.CDPParser): def __init__(self, *args, **kwargs): super(PMPMetricsParser, self).__init__( - pcmdi_metrics.mean_climate.lib.pmp_parameter.PMPMetricsParameter, + #pcmdi_metrics.mean_climate.lib.pmp_parameter.PMPMetricsParameter, + pmp_parameter.PMPMetricsParameter, path_to_default_args(), *args, **kwargs, diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 1360ce1db..a215f036f 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-41-g89eb5916' -__git_sha1__ = '89eb5916b3b68036a359a38fc714de98a915c6ae' +__git_tag_describe__ = 'v2.3.1-42-g10b0aeaf' +__git_sha1__ = '10b0aeaf676df0b66258c208b4298145f92e4f08' From de78869f7e3f8512647615ca8175b1972b89112b Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Thu, 27 Oct 2022 13:06:08 -0700 Subject: [PATCH 034/130] clean up --- pcmdi_metrics/mean_climate/lib/__init__.py | 21 ++-- 
.../lib/mean_climate_metrics_calculations.py | 104 +++++++++--------- .../lib/mean_climate_metrics_driver.py | 10 +- pcmdi_metrics/mean_climate/lib/model.py | 2 +- pcmdi_metrics/mean_climate/lib/observation.py | 2 +- .../mean_climate/lib/outputmetrics.py | 4 +- pcmdi_metrics/mean_climate/lib/pmp_parser.py | 6 +- pcmdi_metrics/version.py | 4 +- 8 files changed, 77 insertions(+), 76 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/__init__.py b/pcmdi_metrics/mean_climate/lib/__init__.py index ccb589afa..99a81d5b7 100644 --- a/pcmdi_metrics/mean_climate/lib/__init__.py +++ b/pcmdi_metrics/mean_climate/lib/__init__.py @@ -14,13 +14,16 @@ std_xyt, zonal_mean ) -from .mean_climate_metrics_calculations import compute_metrics # noqa -from .mean_climate_metrics_driver import PMPDriver, create_mean_climate_parser # noqa -from .io import OBS, JSONs # noqa -from .pmp_parser import PMPParser, PMPMetricsParser # noqa -from .pmp_parameter import PMPParameter, PMPMetricsParameter # noqa -from .outputmetrics import OutputMetrics # noqa -from .observation import OBS, Observation # noqa -from .model import Model # noqa -from .dataset import DataSet # noqa +from . import ( # noqa + io, + pmp_parser, # PMPParser, PMPMetricsParser + pmp_parameter, # PMPParameter, PMPMetricsParameter + outputmetrics, # OutputMetrics + observation, # OBS, Observation + model, # Model + dataset # DataSet +) + +from .mean_climate_metrics_calculations import compute_metrics # noqa +from .mean_climate_metrics_driver import PMPDriver, create_mean_climate_parser # noqa \ No newline at end of file diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py index fd8fa95cb..4da00ddfc 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py @@ -13,26 +13,26 @@ def compute_metrics(Var, dm, do): # Did we send data? Or do we just want the info? 
if dm is None and do is None: metrics_defs = collections.OrderedDict() - metrics_defs["rms_xyt"] = pcmdi_metrics.mean_climate.lib.rms_xyt.compute(None, None) - metrics_defs["rms_xy"] = pcmdi_metrics.mean_climate.lib.rms_xy.compute(None, None) - metrics_defs["rmsc_xy"] = pcmdi_metrics.mean_climate.lib.rmsc_xy.compute(None, None) - metrics_defs["bias_xy"] = pcmdi_metrics.mean_climate.lib.bias_xy.compute(None, None) - metrics_defs["mae_xy"] = pcmdi_metrics.mean_climate.lib.meanabs_xy.compute(None, None) - # metrics_defs["cor_xyt"] = pcmdi_metrics.mean_climate.lib.cor_xyt.compute( + metrics_defs["rms_xyt"] = pcmdi_metrics.mean_climate.lib.rms_xyt(None, None) + metrics_defs["rms_xy"] = pcmdi_metrics.mean_climate.lib.rms_xy(None, None) + metrics_defs["rmsc_xy"] = pcmdi_metrics.mean_climate.lib.rmsc_xy(None, None) + metrics_defs["bias_xy"] = pcmdi_metrics.mean_climate.lib.bias_xy(None, None) + metrics_defs["mae_xy"] = pcmdi_metrics.mean_climate.lib.meanabs_xy(None, None) + # metrics_defs["cor_xyt"] = pcmdi_metrics.mean_climate.lib.cor_xyt( # None, # None) - metrics_defs["cor_xy"] = pcmdi_metrics.mean_climate.lib.cor_xy.compute(None, None) - metrics_defs["mean_xy"] = pcmdi_metrics.mean_climate.lib.mean_xy.compute(None) - metrics_defs["std_xy"] = pcmdi_metrics.mean_climate.lib.std_xy.compute(None) - metrics_defs["std_xyt"] = pcmdi_metrics.mean_climate.lib.std_xyt.compute(None) + metrics_defs["cor_xy"] = pcmdi_metrics.mean_climate.lib.cor_xy(None, None) + metrics_defs["mean_xy"] = pcmdi_metrics.mean_climate.lib.mean_xy(None) + metrics_defs["std_xy"] = pcmdi_metrics.mean_climate.lib.std_xy(None) + metrics_defs["std_xyt"] = pcmdi_metrics.mean_climate.lib.std_xyt(None) - metrics_defs["seasonal_mean"] = pcmdi_metrics.mean_climate.lib.seasonal_mean.compute( + metrics_defs["seasonal_mean"] = pcmdi_metrics.mean_climate.lib.seasonal_mean( None, None ) - metrics_defs["annual_mean"] = pcmdi_metrics.mean_climate.lib.annual_mean.compute( + metrics_defs["annual_mean"] = pcmdi_metrics.mean_climate.lib.annual_mean( None, None ) - metrics_defs["zonal_mean"] = pcmdi_metrics.mean_climate.lib.zonal_mean.compute(None, None) + metrics_defs["zonal_mean"] = pcmdi_metrics.mean_climate.lib.zonal_mean(None, None) return metrics_defs cdms.setAutoBounds("on") metrics_dictionary = {} @@ -49,54 +49,54 @@ def compute_metrics(Var, dm, do): sig_digits = ".3f" # CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD - rms_xyt = pcmdi_metrics.mean_climate.lib.rms_xyt.compute(dm, do) - # cor_xyt = pcmdi_metrics.mean_climate.lib.cor_xyt.compute(dm, do) - stdObs_xyt = pcmdi_metrics.mean_climate.lib.std_xyt.compute(do) - std_xyt = pcmdi_metrics.mean_climate.lib.std_xyt.compute(dm) + rms_xyt = pcmdi_metrics.mean_climate.lib.rms_xyt(dm, do) + # cor_xyt = pcmdi_metrics.mean_climate.lib.cor_xyt(dm, do) + stdObs_xyt = pcmdi_metrics.mean_climate.lib.std_xyt(do) + std_xyt = pcmdi_metrics.mean_climate.lib.std_xyt(dm) # CALCULATE ANNUAL MEANS - dm_am, do_am = pcmdi_metrics.mean_climate.lib.annual_mean.compute(dm, do) + dm_am, do_am = pcmdi_metrics.mean_climate.lib.annual_mean(dm, do) # CALCULATE ANNUAL MEAN BIAS - bias_xy = pcmdi_metrics.mean_climate.lib.bias_xy.compute(dm_am, do_am) + bias_xy = pcmdi_metrics.mean_climate.lib.bias_xy(dm_am, do_am) # CALCULATE MEAN ABSOLUTE ERROR - mae_xy = pcmdi_metrics.mean_climate.lib.meanabs_xy.compute(dm_am, do_am) + mae_xy = pcmdi_metrics.mean_climate.lib.meanabs_xy(dm_am, do_am) # CALCULATE ANNUAL MEAN RMS (centered and uncentered) - rms_xy = pcmdi_metrics.mean_climate.lib.rms_xy.compute(dm_am, do_am) 
- rmsc_xy = pcmdi_metrics.mean_climate.lib.rmsc_xy.compute(dm_am, do_am) + rms_xy = pcmdi_metrics.mean_climate.lib.rms_xy(dm_am, do_am) + rmsc_xy = pcmdi_metrics.mean_climate.lib.rmsc_xy(dm_am, do_am) # CALCULATE ANNUAL MEAN CORRELATION - cor_xy = pcmdi_metrics.mean_climate.lib.cor_xy.compute(dm_am, do_am) + cor_xy = pcmdi_metrics.mean_climate.lib.cor_xy(dm_am, do_am) # CALCULATE ANNUAL OBS and MOD STD - stdObs_xy = pcmdi_metrics.mean_climate.lib.std_xy.compute(do_am) - std_xy = pcmdi_metrics.mean_climate.lib.std_xy.compute(dm_am) + stdObs_xy = pcmdi_metrics.mean_climate.lib.std_xy(do_am) + std_xy = pcmdi_metrics.mean_climate.lib.std_xy(dm_am) # CALCULATE ANNUAL OBS and MOD MEAN - meanObs_xy = pcmdi_metrics.mean_climate.lib.mean_xy.compute(do_am) - mean_xy = pcmdi_metrics.mean_climate.lib.mean_xy.compute(dm_am) + meanObs_xy = pcmdi_metrics.mean_climate.lib.mean_xy(do_am) + mean_xy = pcmdi_metrics.mean_climate.lib.mean_xy(dm_am) # ZONAL MEANS ###### # CALCULATE ANNUAL MEANS - dm_amzm, do_amzm = pcmdi_metrics.mean_climate.lib.zonal_mean.compute(dm_am, do_am) + dm_amzm, do_amzm = pcmdi_metrics.mean_climate.lib.zonal_mean(dm_am, do_am) # CALCULATE ANNUAL AND ZONAL MEAN RMS - rms_y = pcmdi_metrics.mean_climate.lib.rms_0.compute(dm_amzm, do_amzm) + rms_y = pcmdi_metrics.mean_climate.lib.rms_0(dm_amzm, do_amzm) # CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS dm_amzm_grown, dummy = grower(dm_amzm, dm_am) dm_am_devzm = MV2.subtract(dm_am, dm_amzm_grown) do_amzm_grown, dummy = grower(do_amzm, do_am) do_am_devzm = MV2.subtract(do_am, do_amzm_grown) - rms_xy_devzm = pcmdi_metrics.mean_climate.lib.rms_xy.compute(dm_am_devzm, do_am_devzm) + rms_xy_devzm = pcmdi_metrics.mean_climate.lib.rms_xy(dm_am_devzm, do_am_devzm) # CALCULATE ANNUAL AND ZONAL MEAN STD # CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD - stdObs_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy.compute(do_am_devzm) - std_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy.compute(dm_am_devzm) + stdObs_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(do_am_devzm) + std_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(dm_am_devzm) for stat in [ "std-obs_xy", @@ -141,23 +141,23 @@ def compute_metrics(Var, dm, do): # CALCULATE SEASONAL MEANS for sea in ["djf", "mam", "jja", "son"]: - dm_sea = pcmdi_metrics.mean_climate.lib.seasonal_mean.compute(dm, sea) - do_sea = pcmdi_metrics.mean_climate.lib.seasonal_mean.compute(do, sea) + dm_sea = pcmdi_metrics.mean_climate.lib.seasonal_mean(dm, sea) + do_sea = pcmdi_metrics.mean_climate.lib.seasonal_mean(do, sea) # CALCULATE SEASONAL RMS AND CORRELATION - rms_sea = pcmdi_metrics.mean_climate.lib.rms_xy.compute(dm_sea, do_sea) - rmsc_sea = pcmdi_metrics.mean_climate.lib.rmsc_xy.compute(dm_sea, do_sea) - cor_sea = pcmdi_metrics.mean_climate.lib.cor_xy.compute(dm_sea, do_sea) - mae_sea = pcmdi_metrics.mean_climate.lib.meanabs_xy.compute(dm_sea, do_sea) - bias_sea = pcmdi_metrics.mean_climate.lib.bias_xy.compute(dm_sea, do_sea) + rms_sea = pcmdi_metrics.mean_climate.lib.rms_xy(dm_sea, do_sea) + rmsc_sea = pcmdi_metrics.mean_climate.lib.rmsc_xy(dm_sea, do_sea) + cor_sea = pcmdi_metrics.mean_climate.lib.cor_xy(dm_sea, do_sea) + mae_sea = pcmdi_metrics.mean_climate.lib.meanabs_xy(dm_sea, do_sea) + bias_sea = pcmdi_metrics.mean_climate.lib.bias_xy(dm_sea, do_sea) # CALCULATE SEASONAL OBS and MOD STD - stdObs_xy_sea = pcmdi_metrics.mean_climate.lib.std_xy.compute(do_sea) - std_xy_sea = pcmdi_metrics.mean_climate.lib.std_xy.compute(dm_sea) + stdObs_xy_sea = pcmdi_metrics.mean_climate.lib.std_xy(do_sea) + 
std_xy_sea = pcmdi_metrics.mean_climate.lib.std_xy(dm_sea) # CALCULATE SEASONAL OBS and MOD MEAN - meanObs_xy_sea = pcmdi_metrics.mean_climate.lib.mean_xy.compute(do_sea) - mean_xy_sea = pcmdi_metrics.mean_climate.lib.mean_xy.compute(dm_sea) + meanObs_xy_sea = pcmdi_metrics.mean_climate.lib.mean_xy(do_sea) + mean_xy_sea = pcmdi_metrics.mean_climate.lib.mean_xy(dm_sea) metrics_dictionary["bias_xy"][sea] = format(bias_sea * conv, sig_digits) metrics_dictionary["rms_xy"][sea] = format(rms_sea * conv, sig_digits) @@ -201,19 +201,19 @@ def compute_metrics(Var, dm, do): do_mo = do[n] # CALCULATE MONTHLY RMS AND CORRELATION - rms_mo = pcmdi_metrics.mean_climate.lib.rms_xy.compute(dm_mo, do_mo) - rmsc_mo = pcmdi_metrics.mean_climate.lib.rmsc_xy.compute(dm_mo, do_mo) - cor_mo = pcmdi_metrics.mean_climate.lib.cor_xy.compute(dm_mo, do_mo) - mae_mo = pcmdi_metrics.mean_climate.lib.meanabs_xy.compute(dm_mo, do_mo) - bias_mo = pcmdi_metrics.mean_climate.lib.bias_xy.compute(dm_mo, do_mo) + rms_mo = pcmdi_metrics.mean_climate.lib.rms_xy(dm_mo, do_mo) + rmsc_mo = pcmdi_metrics.mean_climate.lib.rmsc_xy(dm_mo, do_mo) + cor_mo = pcmdi_metrics.mean_climate.lib.cor_xy(dm_mo, do_mo) + mae_mo = pcmdi_metrics.mean_climate.lib.meanabs_xy(dm_mo, do_mo) + bias_mo = pcmdi_metrics.mean_climate.lib.bias_xy(dm_mo, do_mo) # CALCULATE MONTHLY OBS and MOD STD - stdObs_xy_mo = pcmdi_metrics.mean_climate.lib.std_xy.compute(do_mo) - std_xy_mo = pcmdi_metrics.mean_climate.lib.std_xy.compute(dm_mo) + stdObs_xy_mo = pcmdi_metrics.mean_climate.lib.std_xy(do_mo) + std_xy_mo = pcmdi_metrics.mean_climate.lib.std_xy(dm_mo) # CALCULATE MONTHLY OBS and MOD MEAN - meanObs_xy_mo = pcmdi_metrics.mean_climate.lib.mean_xy.compute(do_mo) - mean_xy_mo = pcmdi_metrics.mean_climate.lib.mean_xy.compute(dm_mo) + meanObs_xy_mo = pcmdi_metrics.mean_climate.lib.mean_xy(do_mo) + mean_xy_mo = pcmdi_metrics.mean_climate.lib.mean_xy(dm_mo) rms_mo_l.append(format(rms_mo * conv, sig_digits)) rmsc_mo_l.append(format(rmsc_mo * conv, sig_digits)) diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py index 34f5f1a1c..37ef3c7f9 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py @@ -3,14 +3,12 @@ import json import logging -#import pcmdi_metrics.mean_climate.lib.dataset -from pcmdi_metrics.mean_climate.lib import DataSet -#import pcmdi_metrics.mean_climate.lib.pmp_parser +from pcmdi_metrics.mean_climate.lib.dataset import DataSet from pcmdi_metrics.mean_climate.lib import pmp_parser from pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.mean_climate.lib import Model -from pcmdi_metrics.mean_climate.lib import Observation -from pcmdi_metrics.mean_climate.lib import OutputMetrics +from pcmdi_metrics.mean_climate.lib.model import Model +from pcmdi_metrics.mean_climate.lib.observation import Observation +from pcmdi_metrics.mean_climate.lib.outputmetrics import OutputMetrics class PMPDriver(object): diff --git a/pcmdi_metrics/mean_climate/lib/model.py b/pcmdi_metrics/mean_climate/lib/model.py index 127c1eb5a..0c813d50f 100644 --- a/pcmdi_metrics/mean_climate/lib/model.py +++ b/pcmdi_metrics/mean_climate/lib/model.py @@ -5,7 +5,7 @@ import cdutil import MV2 -from pcmdi_metrics.mean_climate.lib import DataSet +from pcmdi_metrics.mean_climate.lib.dataset import DataSet from pcmdi_metrics import LOG_LEVEL from pcmdi_metrics.io.base import Base diff --git 
a/pcmdi_metrics/mean_climate/lib/observation.py b/pcmdi_metrics/mean_climate/lib/observation.py index 1d17c7de7..001889641 100644 --- a/pcmdi_metrics/mean_climate/lib/observation.py +++ b/pcmdi_metrics/mean_climate/lib/observation.py @@ -3,7 +3,7 @@ import MV2 from pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.mean_climate.lib import DataSet +from pcmdi_metrics.mean_climate.lib.dataset import DataSet from pcmdi_metrics.io.base import Base try: diff --git a/pcmdi_metrics/mean_climate/lib/outputmetrics.py b/pcmdi_metrics/mean_climate/lib/outputmetrics.py index ce44484b4..97718ff39 100644 --- a/pcmdi_metrics/mean_climate/lib/outputmetrics.py +++ b/pcmdi_metrics/mean_climate/lib/outputmetrics.py @@ -6,8 +6,8 @@ import pcmdi_metrics from pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.mean_climate.lib import DataSet -from pcmdi_metrics.mean_climate.lib import Observation +from pcmdi_metrics.mean_climate.lib.dataset import DataSet +from pcmdi_metrics.mean_climate.lib.observation import Observation from pcmdi_metrics.io.base import Base try: diff --git a/pcmdi_metrics/mean_climate/lib/pmp_parser.py b/pcmdi_metrics/mean_climate/lib/pmp_parser.py index dc9243264..575033b2a 100644 --- a/pcmdi_metrics/mean_climate/lib/pmp_parser.py +++ b/pcmdi_metrics/mean_climate/lib/pmp_parser.py @@ -3,7 +3,7 @@ import cdp.cdp_parser #import pcmdi_metrics.mean_climate.lib.pmp_parameter -from pcmdi_metrics.mean_climate.lib import pmp_parameter +from pcmdi_metrics.mean_climate.lib.pmp_parameter import PMPParameter, PMPMetricsParameter from pcmdi_metrics import resources try: @@ -23,7 +23,7 @@ class PMPParser(cdp.cdp_parser.CDPParser): def __init__(self, *args, **kwargs): super(PMPParser, self).__init__( #pcmdi_metrics.mean_climate.lib.pmp_parameter.PMPParameter, - pmp_parameter.PMPParameter, + PMPParameter, path_to_default_args(), *args, **kwargs, @@ -36,7 +36,7 @@ class PMPMetricsParser(cdp.cdp_parser.CDPParser): def __init__(self, *args, **kwargs): super(PMPMetricsParser, self).__init__( #pcmdi_metrics.mean_climate.lib.pmp_parameter.PMPMetricsParameter, - pmp_parameter.PMPMetricsParameter, + PMPMetricsParameter, path_to_default_args(), *args, **kwargs, diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index a215f036f..fc6e44cd4 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-42-g10b0aeaf' -__git_sha1__ = '10b0aeaf676df0b66258c208b4298145f92e4f08' +__git_tag_describe__ = 'v2.3.1-43-gb32976bb' +__git_sha1__ = 'b32976bb02516abe96928f34c195a8b72f2fc078' From 8c00d84d7cfbbc467bf66cee53426cc6cead729c Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 27 Oct 2022 13:16:44 -0700 Subject: [PATCH 035/130] pre-commit clean up --- pcmdi_metrics/mean_climate/lib/__init__.py | 36 +++++++++---------- .../mean_climate/lib/compute_statistics.py | 2 +- .../lib/mean_climate_metrics_driver.py | 4 +-- pcmdi_metrics/mean_climate/lib/model.py | 3 +- pcmdi_metrics/mean_climate/lib/observation.py | 2 +- .../mean_climate/lib/outputmetrics.py | 2 +- pcmdi_metrics/mean_climate/lib/pmp_parser.py | 8 ++--- .../param/basic_annual_cycle_param.py | 4 +-- .../mean_climate/param/basic_param.py | 18 +++++----- .../pcmdi_compute_climatologies.py | 8 ++--- pcmdi_metrics/version.py | 4 +-- 11 files changed, 42 insertions(+), 49 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/__init__.py b/pcmdi_metrics/mean_climate/lib/__init__.py index 99a81d5b7..77dfadab0 100644 --- a/pcmdi_metrics/mean_climate/lib/__init__.py +++ 
b/pcmdi_metrics/mean_climate/lib/__init__.py @@ -1,29 +1,25 @@ +from . import dataset # DataSet +from . import io # noqa +from . import model # Model +from . import observation # OBS, Observation +from . import outputmetrics # OutputMetrics +from . import pmp_parameter # PMPParameter, PMPMetricsParameter +from . import pmp_parser # PMPParser, PMPMetricsParser from .compute_statistics import ( # noqa annual_mean, bias_xy, cor_xy, cor_xyt, mean_xy, - meanabs_xy, - rms_0, - rms_xy, + meanabs_xy, + rms_0, + rms_xy, rms_xyt, - rmsc_xy, - seasonal_mean, - std_xy, - std_xyt, - zonal_mean + rmsc_xy, + seasonal_mean, + std_xy, + std_xyt, + zonal_mean, ) - -from . import ( # noqa - io, - pmp_parser, # PMPParser, PMPMetricsParser - pmp_parameter, # PMPParameter, PMPMetricsParameter - outputmetrics, # OutputMetrics - observation, # OBS, Observation - model, # Model - dataset # DataSet -) - from .mean_climate_metrics_calculations import compute_metrics # noqa -from .mean_climate_metrics_driver import PMPDriver, create_mean_climate_parser # noqa \ No newline at end of file +from .mean_climate_metrics_driver import PMPDriver, create_mean_climate_parser # noqa diff --git a/pcmdi_metrics/mean_climate/lib/compute_statistics.py b/pcmdi_metrics/mean_climate/lib/compute_statistics.py index be8e957f3..3aec8aecf 100644 --- a/pcmdi_metrics/mean_climate/lib/compute_statistics.py +++ b/pcmdi_metrics/mean_climate/lib/compute_statistics.py @@ -235,4 +235,4 @@ def zonal_mean(dm, do): "Contact": "pcmdi-metrics@llnl.gov", "Comments": "", } - return cdutil.averager(dm, axis="x"), cdutil.averager(do, axis="x") \ No newline at end of file + return cdutil.averager(dm, axis="x"), cdutil.averager(do, axis="x") diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py index 37ef3c7f9..08e10596a 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py @@ -3,9 +3,9 @@ import json import logging -from pcmdi_metrics.mean_climate.lib.dataset import DataSet -from pcmdi_metrics.mean_climate.lib import pmp_parser from pcmdi_metrics import LOG_LEVEL +from pcmdi_metrics.mean_climate.lib import pmp_parser +from pcmdi_metrics.mean_climate.lib.dataset import DataSet from pcmdi_metrics.mean_climate.lib.model import Model from pcmdi_metrics.mean_climate.lib.observation import Observation from pcmdi_metrics.mean_climate.lib.outputmetrics import OutputMetrics diff --git a/pcmdi_metrics/mean_climate/lib/model.py b/pcmdi_metrics/mean_climate/lib/model.py index 0c813d50f..5de1d3160 100644 --- a/pcmdi_metrics/mean_climate/lib/model.py +++ b/pcmdi_metrics/mean_climate/lib/model.py @@ -5,12 +5,11 @@ import cdutil import MV2 -from pcmdi_metrics.mean_climate.lib.dataset import DataSet from pcmdi_metrics import LOG_LEVEL from pcmdi_metrics.io.base import Base +from pcmdi_metrics.mean_climate.lib.dataset import DataSet -#class Model(pcmdi_metrics.mean_climate.lib.dataset.DataSet): class Model(DataSet): """Handles all the computation (setting masking, target grid, etc) and some file I/O related to models.""" diff --git a/pcmdi_metrics/mean_climate/lib/observation.py b/pcmdi_metrics/mean_climate/lib/observation.py index 001889641..4bc2b970d 100644 --- a/pcmdi_metrics/mean_climate/lib/observation.py +++ b/pcmdi_metrics/mean_climate/lib/observation.py @@ -3,8 +3,8 @@ import MV2 from pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.mean_climate.lib.dataset import DataSet from 
pcmdi_metrics.io.base import Base +from pcmdi_metrics.mean_climate.lib.dataset import DataSet try: basestring # noqa diff --git a/pcmdi_metrics/mean_climate/lib/outputmetrics.py b/pcmdi_metrics/mean_climate/lib/outputmetrics.py index 97718ff39..29d14c459 100644 --- a/pcmdi_metrics/mean_climate/lib/outputmetrics.py +++ b/pcmdi_metrics/mean_climate/lib/outputmetrics.py @@ -6,9 +6,9 @@ import pcmdi_metrics from pcmdi_metrics import LOG_LEVEL +from pcmdi_metrics.io.base import Base from pcmdi_metrics.mean_climate.lib.dataset import DataSet from pcmdi_metrics.mean_climate.lib.observation import Observation -from pcmdi_metrics.io.base import Base try: basestring # noqa diff --git a/pcmdi_metrics/mean_climate/lib/pmp_parser.py b/pcmdi_metrics/mean_climate/lib/pmp_parser.py index 575033b2a..cc4bfed9a 100644 --- a/pcmdi_metrics/mean_climate/lib/pmp_parser.py +++ b/pcmdi_metrics/mean_climate/lib/pmp_parser.py @@ -2,9 +2,11 @@ import cdp.cdp_parser -#import pcmdi_metrics.mean_climate.lib.pmp_parameter -from pcmdi_metrics.mean_climate.lib.pmp_parameter import PMPParameter, PMPMetricsParameter from pcmdi_metrics import resources +from pcmdi_metrics.mean_climate.lib.pmp_parameter import ( + PMPMetricsParameter, + PMPParameter, +) try: basestring # noqa @@ -22,7 +24,6 @@ def path_to_default_args(): class PMPParser(cdp.cdp_parser.CDPParser): def __init__(self, *args, **kwargs): super(PMPParser, self).__init__( - #pcmdi_metrics.mean_climate.lib.pmp_parameter.PMPParameter, PMPParameter, path_to_default_args(), *args, @@ -35,7 +36,6 @@ def __init__(self, *args, **kwargs): class PMPMetricsParser(cdp.cdp_parser.CDPParser): def __init__(self, *args, **kwargs): super(PMPMetricsParser, self).__init__( - #pcmdi_metrics.mean_climate.lib.pmp_parameter.PMPMetricsParameter, PMPMetricsParameter, path_to_default_args(), *args, diff --git a/pcmdi_metrics/mean_climate/param/basic_annual_cycle_param.py b/pcmdi_metrics/mean_climate/param/basic_annual_cycle_param.py index 0222863e7..29d40c1ea 100644 --- a/pcmdi_metrics/mean_climate/param/basic_annual_cycle_param.py +++ b/pcmdi_metrics/mean_climate/param/basic_annual_cycle_param.py @@ -1,11 +1,11 @@ # VARIABLES TO USE vars = ['pr'] -#vars = ['ua', 'ta'] +# vars = ['ua', 'ta'] vars = ['pr', 'ua', 'ta'] # START AND END DATES FOR CLIMATOLOGY start = '1981-01' -#end = '1983-12' +# end = '1983-12' end = '2005-12' # INPUT DATASET - CAN BE MODEL OR OBSERVATIONS diff --git a/pcmdi_metrics/mean_climate/param/basic_param.py b/pcmdi_metrics/mean_climate/param/basic_param.py index 8f8db45f1..8148c390b 100644 --- a/pcmdi_metrics/mean_climate/param/basic_param.py +++ b/pcmdi_metrics/mean_climate/param/basic_param.py @@ -16,14 +16,14 @@ # VARIABLES TO USE -#vars = ['pr', 'ua_850'] +# vars = ['pr', 'ua_850'] vars = ['pr'] # Observations to use at the moment "default" or "alternate" -#reference_data_set = ['all'] +# reference_data_set = ['all'] reference_data_set = ['default'] -#ext = '.nc' +# ext = '.nc' # INTERPOLATION OPTIONS target_grid = '2.5x2.5' # OPTIONS: '2.5x2.5' or an actual cdms2 grid object @@ -35,9 +35,9 @@ regrid_method_ocn = 'linear' # SAVE INTERPOLATED MODEL CLIMATOLOGIES ? 
-save_test_clims = True # True or False +save_test_clims = True # True or False -## DIRECTORY WHERE TO PUT INTERPOLATED MODELS' CLIMATOLOGIES +# DIRECTORY WHERE TO PUT INTERPOLATED MODELS' CLIMATOLOGIES test_clims_interpolated_output = './interpolated_model_clims' @@ -48,16 +48,16 @@ # filename template for landsea masks ('sftlf') sftlf_filename_template = "sftlf_fx_E3SM-1-0_historical_r1i1p1f1_gr.nc" -generate_sftlf = False # if land surface type mask cannot be found, generate one +generate_sftlf = False # if land surface type mask cannot be found, generate one # Region regions = {"pr": ["global"], - "ua_850": ["global"] - } + "ua_850": ["global"]} # ROOT PATH FOR MODELS CLIMATOLOGIES -#test_data_path = '/work/lee1043/ESGF/E3SMv2/atmos/mon' +# test_data_path = '/work/lee1043/ESGF/E3SMv2/atmos/mon' test_data_path = './clim' + # ROOT PATH FOR OBSERVATIONS # Note that atm/mo/%(variable)/ac will be added to this reference_data_path = '/p/user_pub/PCMDIobs/obs4MIPs_clims' diff --git a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py index 2083aa6d7..6df4739bb 100755 --- a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py +++ b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py @@ -2,12 +2,12 @@ import datetime import os -import dask +import dask from genutil import StringConstructor -import pcmdi_metrics from pcmdi_metrics.io import xcdat_open +from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPMetricsParser def clim_calc(var, infile, outfile=None, outpath=None, outfilename=None, start=None, end=None): @@ -35,8 +35,6 @@ def clim_calc(var, infile, outfile=None, outpath=None, outfilename=None, start=N print("outdir is ", outdir) - c = d.time # coordinate for time - # CLIM PERIOD if (start is None) and (end is None): # DEFAULT CLIM - BASED ON ENTIRE TIME SERIES @@ -104,7 +102,7 @@ def clim_calc(var, infile, outfile=None, outpath=None, outfilename=None, start=N ver = datetime.datetime.now().strftime("v%Y%m%d") - P = pcmdi_metrics.mean_climate.pmp_parser.PMPMetricsParser() + P = PMPMetricsParser() P.add_argument( "--vars", dest="vars", help="List of variables", nargs="+", required=False diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index fc6e44cd4..770f506d7 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-43-gb32976bb' -__git_sha1__ = 'b32976bb02516abe96928f34c195a8b72f2fc078' +__git_tag_describe__ = 'v2.3.1-44-gde78869' +__git_sha1__ = 'de78869f7e3f8512647615ca8175b1972b89112b' From bf8013aad7d44880dea2c02fe94ee4cc1d175de4 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 27 Oct 2022 13:18:56 -0700 Subject: [PATCH 036/130] import path correct --- pcmdi_metrics/mean_climate/mean_climate_driver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py index d431de0d3..5429360d5 100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -from pcmdi_metrics.pcmdi import PMPDriver, create_mean_climate_parser +from pcmdi_metrics.mean_climate.lib import PMPDriver, create_mean_climate_parser parser = create_mean_climate_parser() parameter = parser.get_parameter(cmd_default_vars=False, argparse_vals_only=False) From f70f40dd2371701bff7cb2067c84d3e593a5966c Mon Sep 17 00:00:00 2001 From: lee1043 Date: 
Thu, 27 Oct 2022 13:19:12 -0700 Subject: [PATCH 037/130] clean up --- pcmdi_metrics/version.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 770f506d7..cfc21ffb7 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-44-gde78869' -__git_sha1__ = 'de78869f7e3f8512647615ca8175b1972b89112b' +__git_tag_describe__ = 'v2.3.1-45-g8c00d84' +__git_sha1__ = '8c00d84d7cfbbc467bf66cee53426cc6cead729c' From 4b1e424e01bea55419596c71cc5d593cfa11a7b3 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Thu, 27 Oct 2022 13:22:16 -0700 Subject: [PATCH 038/130] pre-commit clean up --- setup.py | 2 -- share/DefArgsCIA.json | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/setup.py b/setup.py index 412814dc0..c316f6e5a 100755 --- a/setup.py +++ b/setup.py @@ -1,9 +1,7 @@ from __future__ import print_function import glob -import os import subprocess -import sys from setuptools import find_packages, setup diff --git a/share/DefArgsCIA.json b/share/DefArgsCIA.json index cd33a055d..8507f33ba 100644 --- a/share/DefArgsCIA.json +++ b/share/DefArgsCIA.json @@ -163,4 +163,4 @@ ], "help":"A list of variables to be processed" } -} \ No newline at end of file +} From de7e291512f7867c42852fe1c8875b44dc98c2ce Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 27 Oct 2022 13:26:04 -0700 Subject: [PATCH 039/130] clean up --- pcmdi_metrics/version.py | 4 ++-- share/DefArgsCIA.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index cfc21ffb7..a4f2d9b63 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-45-g8c00d84' -__git_sha1__ = '8c00d84d7cfbbc467bf66cee53426cc6cead729c' +__git_tag_describe__ = 'v2.3.1-49-g3dc3a58' +__git_sha1__ = '3dc3a58480110f8093e1f140e74d8585f53249ce' diff --git a/share/DefArgsCIA.json b/share/DefArgsCIA.json index 8507f33ba..cd33a055d 100644 --- a/share/DefArgsCIA.json +++ b/share/DefArgsCIA.json @@ -163,4 +163,4 @@ ], "help":"A list of variables to be processed" } -} +} \ No newline at end of file From ea5e880971c21d4c6ca7a77807f6017131c76453 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 27 Oct 2022 13:29:17 -0700 Subject: [PATCH 040/130] bug fix --- pcmdi_metrics/mean_climate/lib/outputmetrics.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/outputmetrics.py b/pcmdi_metrics/mean_climate/lib/outputmetrics.py index 29d14c459..0d7292039 100644 --- a/pcmdi_metrics/mean_climate/lib/outputmetrics.py +++ b/pcmdi_metrics/mean_climate/lib/outputmetrics.py @@ -4,11 +4,11 @@ import cdms2 -import pcmdi_metrics from pcmdi_metrics import LOG_LEVEL from pcmdi_metrics.io.base import Base from pcmdi_metrics.mean_climate.lib.dataset import DataSet from pcmdi_metrics.mean_climate.lib.observation import Observation +from pcmdi_metrics.mean_climate.lib import compute_metrics try: basestring # noqa @@ -165,14 +165,14 @@ def calculate_and_output_metrics(self, ref, test): ].get(self.parameter.realization, {}) if not self.parameter.dry_run: - pr_rgn = pcmdi_metrics.pcmdi.compute_metrics( + pr_rgn = compute_metrics( self.var_name_long, test_data, ref_data ) # Calling compute_metrics with None for the model and obs returns # the definitions. 
self.metrics_def_dictionary.update( - pcmdi_metrics.pcmdi.compute_metrics(self.var_name_long, None, None) + compute_metrics(self.var_name_long, None, None) ) if hasattr(self.parameter, "compute_custom_metrics"): pr_rgn.update( From 1a527675bc820b58f77d86dc0fb5759fe11245f5 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 27 Oct 2022 14:13:50 -0700 Subject: [PATCH 041/130] clean up --- pcmdi_metrics/mean_climate/lib/__init__.py | 20 +++++++++++++------ .../lib/mean_climate_metrics_calculations.py | 11 ---------- pcmdi_metrics/version.py | 4 ++-- setup.py | 2 +- 4 files changed, 17 insertions(+), 20 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/__init__.py b/pcmdi_metrics/mean_climate/lib/__init__.py index 77dfadab0..f1a95bf68 100644 --- a/pcmdi_metrics/mean_climate/lib/__init__.py +++ b/pcmdi_metrics/mean_climate/lib/__init__.py @@ -1,10 +1,10 @@ -from . import dataset # DataSet +from . import dataset # DataSet # noqa from . import io # noqa -from . import model # Model -from . import observation # OBS, Observation -from . import outputmetrics # OutputMetrics -from . import pmp_parameter # PMPParameter, PMPMetricsParameter -from . import pmp_parser # PMPParser, PMPMetricsParser +from . import model # Model # noqa +from . import observation # OBS, Observation # noqa +from . import outputmetrics # OutputMetrics # noqa +from . import pmp_parameter # PMPParameter, PMPMetricsParameter # noqa +from . import pmp_parser # PMPParser, PMPMetricsParser # noqa from .compute_statistics import ( # noqa annual_mean, bias_xy, @@ -23,3 +23,11 @@ ) from .mean_climate_metrics_calculations import compute_metrics # noqa from .mean_climate_metrics_driver import PMPDriver, create_mean_climate_parser # noqa + +from . import dataset # DataSet # noqa # isort:skip +from . import io # noqa # isort:skip +from . import model # Model # noqa # isort:skip +from . import observation # OBS, Observation # noqa # isort:skip +from . import outputmetrics # OutputMetrics # noqa # isort:skip +from . import pmp_parameter # PMPParameter, PMPMetricsParameter # noqa # isort:skip +from . 
import pmp_parser # PMPParser, PMPMetricsParser # noqa # isort:skip diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py index 4da00ddfc..75ee28913 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py @@ -1,4 +1,3 @@ -import collections import cdms2 as cdms import MV2 @@ -225,16 +224,6 @@ def compute_metrics(Var, dm, do): meanObs_xy_mo_l.append(format(meanObs_xy_mo * conv, sig_digits)) mean_xy_mo_l.append(format(mean_xy_mo * conv, sig_digits)) - # metrics_dictionary['bias_xy'][mo] = format( bias_mo * conv, sig_digits) - # metrics_dictionary['rms_xy'][mo] = format( rms_mo * conv, sig_digits) - # metrics_dictionary['rmsc_xy'][mo] = format( rmsc_mo * conv, sig_digits) - # metrics_dictionary['cor_xy'][mo] = format( cor_mo, '.2f') - # metrics_dictionary['mae_xy'][mo] = format( mae_mo * conv, sig_digits) - # metrics_dictionary['std-obs_xy'][mo] = format( stdObs_xy_mo * conv, sig_digits) - # metrics_dictionary['std_xy'][mo] = format( std_xy_mo * conv, sig_digits) - # metrics_dictionary['mean-obs_xy'][mo] = format( meanObs_xy_mo * conv, sig_digits) - # metrics_dictionary['mean_xy'][mo] = format( mean_xy_mo * conv, sig_digits) - metrics_dictionary["bias_xy"]["CalendarMonths"] = bias_mo_l metrics_dictionary["rms_xy"]["CalendarMonths"] = rms_mo_l metrics_dictionary["rmsc_xy"]["CalendarMonths"] = rmsc_mo_l diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index a4f2d9b63..023ae5288 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-49-g3dc3a58' -__git_sha1__ = '3dc3a58480110f8093e1f140e74d8585f53249ce' +__git_tag_describe__ = 'v2.3.1-51-gea5e880' +__git_sha1__ = 'ea5e880971c21d4c6ca7a77807f6017131c76453' diff --git a/setup.py b/setup.py index c316f6e5a..155df2524 100755 --- a/setup.py +++ b/setup.py @@ -40,7 +40,7 @@ p = subprocess.Popen(["python", "setup_default_args.py"], cwd="share") p.communicate() -packages = find_packages(exclude=["cmec", "tests"]) +packages = find_packages(exclude=["cmec", "tests"], include=["pcmdi_metrics*"]) scripts = [ "pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py", From 393e627d39535b323f756b5770971102acec512c Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 27 Oct 2022 14:22:30 -0700 Subject: [PATCH 042/130] clean up --- pcmdi_metrics/io/base.py | 1 + pcmdi_metrics/version.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/pcmdi_metrics/io/base.py b/pcmdi_metrics/io/base.py index feb4cecee..9f49533b1 100755 --- a/pcmdi_metrics/io/base.py +++ b/pcmdi_metrics/io/base.py @@ -389,6 +389,7 @@ def mask_var(self, var): def set_target_grid_and_mask_in_var(self, var): if self.target_grid is not None: + print('jwlee-test-regrid, var.shape:', var.shape) var = var.regrid( self.target_grid, regridTool=self.regrid_tool, diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 023ae5288..6e1213145 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-51-gea5e880' -__git_sha1__ = 'ea5e880971c21d4c6ca7a77807f6017131c76453' +__git_tag_describe__ = 'v2.3.1-52-g1a52767' +__git_sha1__ = '1a527675bc820b58f77d86dc0fb5759fe11245f5' From 0d68f6167c812b579a978c007c825e46b20c8f33 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 27 Oct 2022 14:23:24 -0700 Subject: 
[PATCH 043/130] clean up --- pcmdi_metrics/mean_climate/lib/__init__.py | 7 ------- pcmdi_metrics/version.py | 4 ++-- 2 files changed, 2 insertions(+), 9 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/__init__.py b/pcmdi_metrics/mean_climate/lib/__init__.py index f1a95bf68..5675e0843 100644 --- a/pcmdi_metrics/mean_climate/lib/__init__.py +++ b/pcmdi_metrics/mean_climate/lib/__init__.py @@ -1,10 +1,3 @@ -from . import dataset # DataSet # noqa -from . import io # noqa -from . import model # Model # noqa -from . import observation # OBS, Observation # noqa -from . import outputmetrics # OutputMetrics # noqa -from . import pmp_parameter # PMPParameter, PMPMetricsParameter # noqa -from . import pmp_parser # PMPParser, PMPMetricsParser # noqa from .compute_statistics import ( # noqa annual_mean, bias_xy, diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 6e1213145..495f0633c 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-52-g1a52767' -__git_sha1__ = '1a527675bc820b58f77d86dc0fb5759fe11245f5' +__git_tag_describe__ = 'v2.3.1-53-g393e627' +__git_sha1__ = '393e627d39535b323f756b5770971102acec512c' From 06560d3ecd4927a9083c6e1c56fff218e25c9ef6 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 27 Oct 2022 14:37:24 -0700 Subject: [PATCH 044/130] bug fix --- .../mean_climate/lib/mean_climate_metrics_calculations.py | 1 + pcmdi_metrics/version.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py index 75ee28913..1e262f08b 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py @@ -4,6 +4,7 @@ from genutil import grower import pcmdi_metrics +import collections def compute_metrics(Var, dm, do): diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 495f0633c..c809e8c1c 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-53-g393e627' -__git_sha1__ = '393e627d39535b323f756b5770971102acec512c' +__git_tag_describe__ = 'v2.3.1-54-g0d68f61' +__git_sha1__ = '0d68f6167c812b579a978c007c825e46b20c8f33' From 48573380a221a80d0763908a8454d3459e04757e Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 27 Oct 2022 16:12:45 -0700 Subject: [PATCH 045/130] add readme and clean up --- pcmdi_metrics/mean_climate/README.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 pcmdi_metrics/mean_climate/README.md diff --git a/pcmdi_metrics/mean_climate/README.md b/pcmdi_metrics/mean_climate/README.md new file mode 100644 index 000000000..f2d1dc4d1 --- /dev/null +++ b/pcmdi_metrics/mean_climate/README.md @@ -0,0 +1,13 @@ +# PMP Mean Climate Metrics + +## STEP 1. Calculate annual cycle and seasonal mean, and archive + +Example usage: + +```pcmdi_compute_climatologies.py -p param/basic_annual_cycle_param.py``` + +## STEP 2. Compute metrics + +Example usage: + +```mean_climate_driver.py -p param/basic_param.py``` From 709ffeeb1a9d13f11c8b443a9495a6a70f0fcb15 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Fri, 28 Oct 2022 20:11:53 -0700 Subject: [PATCH 046/130] some progress toward using xcdat for step 2 mean clim metric calculations .... 
--- pcmdi_metrics/io/__init__.py | 2 +- pcmdi_metrics/io/base.py | 45 +++++++++++++-- pcmdi_metrics/io/xcdat_openxml.py | 14 ++--- pcmdi_metrics/mean_climate/lib/dataset.py | 10 +++- .../lib/mean_climate_metrics_driver.py | 8 ++- pcmdi_metrics/mean_climate/lib/model.py | 6 +- pcmdi_metrics/mean_climate/lib/observation.py | 7 ++- .../mean_climate/lib/outputmetrics.py | 55 +++++++++++++++---- pcmdi_metrics/version.py | 4 +- 9 files changed, 119 insertions(+), 32 deletions(-) diff --git a/pcmdi_metrics/io/__init__.py b/pcmdi_metrics/io/__init__.py index fbf02bef0..62b92a3a0 100644 --- a/pcmdi_metrics/io/__init__.py +++ b/pcmdi_metrics/io/__init__.py @@ -1,4 +1,4 @@ # init for pcmdi_metrics.io +from .xcdat_openxml import xcdat_open # noqa # isort:skip from . import base # noqa from .base import MV2Json # noqa -from .xcdat_openxml import xcdat_open # noqa diff --git a/pcmdi_metrics/io/base.py b/pcmdi_metrics/io/base.py index 9f49533b1..430a4bbab 100755 --- a/pcmdi_metrics/io/base.py +++ b/pcmdi_metrics/io/base.py @@ -4,6 +4,7 @@ import logging import os import re +import sys from collections import OrderedDict from collections.abc import Mapping @@ -14,9 +15,11 @@ import genutil import MV2 import numpy +import xcdat import pcmdi_metrics from pcmdi_metrics import LOG_LEVEL +from pcmdi_metrics.io import xcdat_open value = 0 cdms2.setNetcdfShuffleFlag(value) # where value is either 0 or 1 @@ -339,6 +342,7 @@ def get_dimensions(json_dict, json_structure): ) def get(self, var, var_in_file=None, region={}, *args, **kwargs): + print('jwlee-test-get, var, var_in_file:', var, var_in_file) self.variable = var self.var_from_file = self.extract_var_from_file( var, var_in_file, *args, **kwargs @@ -352,7 +356,7 @@ def get(self, var, var_in_file=None, region={}, *args, **kwargs): if self.is_masking(): self.var_from_file = self.mask_var(self.var_from_file) - self.var_from_file = self.set_target_grid_and_mask_in_var(self.var_from_file) + self.var_from_file = self.set_target_grid_and_mask_in_var(self.var_from_file, var) self.var_from_file = self.set_domain_in_var(self.var_from_file, self.region) @@ -362,12 +366,17 @@ def extract_var_from_file(self, var, var_in_file, *args, **kwargs): if var_in_file is None: var_in_file = var # self.extension = 'nc' + """ var_file = cdms2.open(self(), "r") for att in ["var_in_file,", "varInFile"]: if att in kwargs: del kwargs[att] extracted_var = var_file(var_in_file, *args, **kwargs) var_file.close() + """ + ds = xcdat_open(self(), data_var=var_in_file, decode_times=False) + extracted_var = ds + return extracted_var def is_masking(self): @@ -387,8 +396,17 @@ def mask_var(self, var): mask = MV2.not_equal(mask, self.value) return MV2.masked_where(mask, var) - def set_target_grid_and_mask_in_var(self, var): + def set_target_grid_and_mask_in_var(self, var, var_in_file): + """ + self: object + dir(self): ['__abstractmethods__', '__call__', '__class__', '__delattr__', '__dict__', '__dir__', '__doc__', '__eq__', '__format__', '__ge__', '__getattribute__', '__gt__', '__hash__', '__init__', '__init_subclass__', '__le__', '__lt__', '__module__', '__ne__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', '__weakref__', '_abc_impl', 'case_id', 'construct', 'ext', 'extract_var_from_file', 'file_mask_template', 'get', 'get_mask_from_var', 'hash', 'is_masking', 'keys', 'mask', 'mask_var', 'model_version', 'period', 'read', 'realization', 'realm', 'region', 'regrid_method', 'regrid_tool', 'reverse', 'root', 'set_domain_in_var', 
'set_file_mask_template', 'set_target_grid', 'set_target_grid_and_mask_in_var', 'setup_cdms2', 'table', 'target_grid', 'target_grid_name', 'target_mask', 'template', 'type', 'value', 'var_from_file', 'variable', 'write', 'write_cmec'] + self(): string, path to input file + """ + print('jwlee-test-regrid, set_target_grid_and_mask_in_var start') + #print('jwlee-test-regrid, self.target_grid:', self.target_grid) if self.target_grid is not None: + #print('jwlee-test-regrid, var[var_in_file].shape:', var[var_in_file].shape) + """ print('jwlee-test-regrid, var.shape:', var.shape) var = var.regrid( self.target_grid, @@ -398,14 +416,31 @@ def set_target_grid_and_mask_in_var(self, var): diag={}, periodicity=1, ) + """ + #print('jwlee-test-regrid, dir(self):', dir(self)) + print('jwlee-test-regrid, type(self):', type(self)) + print('jwlee-test-regrid, type(self()):', type(self())) + print('jwlee-test-regrid, self():', self()) + print('jwlee-test-regrid, regridder start, var_in_file:', var_in_file) + var.to_netcdf(self().split('/')[-1].split('.nc')[0] + '_test1-org.nc') + var = var.regridder.horizontal(var_in_file, self.target_grid, tool=self.regrid_tool) + print('jwlee-test-regrid, regridder done') + var.to_netcdf(self().split('/')[-1].split('.nc')[0]+'_test2-regridded.nc') + + print('jwlee-test-regrid-2, var[var_in_file].shape:', var[var_in_file].shape) + + print('jwlee-test-regrid-3, self.target_mask:', self.target_mask) if self.target_mask is not None: - if self.target_mask.shape != var.shape: + #if self.target_mask.shape != var.shape: + if self.target_mask.shape != var[var_in_file].shape: dummy, mask = genutil.grower(var, self.target_mask) else: mask = self.target_mask var = MV2.masked_where(mask, var) + print('jwlee-test-regrid-4, set_target_grid_and_mask_in_var done') + return var def set_domain_in_var(self, var, region): @@ -445,7 +480,9 @@ def set_target_grid(self, target, regrid_tool="esmf", regrid_method="linear"): self.regrid_tool = regrid_tool self.regrid_method = regrid_method if target == "2.5x2.5": - self.target_grid = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) + print('jwlee-test, set_target_grid, start') + #self.target_grid = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) + self.target_grid = xcdat.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) self.target_grid_name = target elif cdms2.isGrid(target): self.target_grid = target diff --git a/pcmdi_metrics/io/xcdat_openxml.py b/pcmdi_metrics/io/xcdat_openxml.py index c97a6786b..0135481c0 100644 --- a/pcmdi_metrics/io/xcdat_openxml.py +++ b/pcmdi_metrics/io/xcdat_openxml.py @@ -6,7 +6,7 @@ import xmltodict -def xcdat_open(infile, data_var=None): +def xcdat_open(infile, data_var=None, decode_times=True): """ Parameter --------- @@ -22,17 +22,17 @@ def xcdat_open(infile, data_var=None): xcdat dataset """ if isinstance(infile, list): - ds = xcdat.open_mfdataset(infile, data_var=data_var) + ds = xcdat.open_mfdataset(infile, data_var=data_var, decode_times=decode_times) else: if infile.split('.')[-1].lower() == 'xml': - ds = xcdat_openxml(infile, data_var=data_var) + ds = xcdat_openxml(infile, data_var=data_var, decode_times=decode_times) else: - ds = xcdat.open_dataset(infile, data_var=data_var) + ds = xcdat.open_dataset(infile, data_var=data_var, decode_times=decode_times) return ds -def xcdat_openxml(xmlfile, data_var=None): +def xcdat_openxml(xmlfile, data_var=None, decode_times=True): """ Parameter --------- @@ -55,8 +55,8 @@ def xcdat_openxml(xmlfile, data_var=None): ncfile_list = 
glob.glob(os.path.join(doc['dataset']['@directory'], '*.nc')) if len(ncfile_list) > 1: - ds = xcdat.open_mfdataset(ncfile_list, data_var=data_var) + ds = xcdat.open_mfdataset(ncfile_list, data_var=data_var, decode_times=decode_times) else: - ds = xcdat.open_dataset(ncfile_list[0], data_var=data_var) + ds = xcdat.open_dataset(ncfile_list[0], data_var=data_var, decode_times=decode_times) return ds diff --git a/pcmdi_metrics/mean_climate/lib/dataset.py b/pcmdi_metrics/mean_climate/lib/dataset.py index d137806fc..537908458 100644 --- a/pcmdi_metrics/mean_climate/lib/dataset.py +++ b/pcmdi_metrics/mean_climate/lib/dataset.py @@ -5,6 +5,7 @@ import cdms2 import cdutil +import xcdat from six import with_metaclass from pcmdi_metrics import resources @@ -94,14 +95,19 @@ def create_sftlf(parameter): sftlf[test] = {"raw": None} sftlf[test]["filename"] = None sftlf[test]["md5"] = None + print('jwlee-test-target_grid-create') if parameter.target_grid == "2.5x2.5": - t_grid = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) + t_grid_cdms2 = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) + t_grid = xcdat.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) else: t_grid = parameter.target_grid + print('jwlee-test-target_grid-create done') - sft = cdutil.generateLandSeaMask(t_grid) + #sft = cdutil.generateLandSeaMask(t_grid) + sft = cdutil.generateLandSeaMask(t_grid_cdms2) sft[:] = sft.filled(1.0) * 100.0 sftlf["target_grid"] = sft + print('jwlee-test-target_grid, type(sft), sft.shape:', type(sft), sft.shape) return sftlf diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py index 08e10596a..c6cba41a8 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py @@ -212,10 +212,13 @@ def run_reference_and_test_comparison(self): ) break - try: + #try: + if 1: print('jwlee-test-2: type(self), ref, tst:', type(self), ref, tst) - print('jwlee-test-2: tst().shape:', tst().shape) + print('jwlee-test-2: self.var, self.var_name_long:', self.var, self.var_name_long) + print('jwlee-test-2: tst()[self.var].shape:', tst()[self.var].shape) self.output_metric.calculate_and_output_metrics(ref, tst) + """ except RuntimeError: continue except Exception as err: @@ -226,6 +229,7 @@ def run_reference_and_test_comparison(self): ) logging.getLogger("pcmdi_metrics").info(err_msg) break + """ def is_data_set_obs(self, data_set): """Is data_set (which is either a test or reference) an obs?""" diff --git a/pcmdi_metrics/mean_climate/lib/model.py b/pcmdi_metrics/mean_climate/lib/model.py index 5de1d3160..ed74cbd0d 100644 --- a/pcmdi_metrics/mean_climate/lib/model.py +++ b/pcmdi_metrics/mean_climate/lib/model.py @@ -58,7 +58,9 @@ def setup_target_mask(self): def get(self): """Gets the variable based on the region and level (if given) for the file from data_path, which is defined in the initalizer.""" - try: + print('jwlee-test-get: self.var_in_file, self.region:', self.var_in_file, self.region) + #try: + if 1: if self.level is None: data_model = self._model_file.get( self.var, var_in_file=self.var_in_file, region=self.region @@ -73,12 +75,14 @@ def get(self): return data_model + """ except Exception as e: msg = "Failed to get variables %s for versions: %s, error: %s" logging.getLogger("pcmdi_metrics").error( msg % (self.var, self.obs_or_model, e) ) raise RuntimeError("Need to skip model: %s" % self.obs_or_model) + """ def get_var_in_file(self): """Based 
off the model_tweaks parameter, get the variable mapping.""" diff --git a/pcmdi_metrics/mean_climate/lib/observation.py b/pcmdi_metrics/mean_climate/lib/observation.py index 4bc2b970d..ba3e87534 100644 --- a/pcmdi_metrics/mean_climate/lib/observation.py +++ b/pcmdi_metrics/mean_climate/lib/observation.py @@ -131,7 +131,9 @@ def setup_target_mask(self): def get(self): """Gets the variable based on the region and level (if given) for the file from data_path, which is defined in the initializer.""" - try: + #try: + if 1: + print('jwlee-test-observation-get, self.level:', self.level) if self.level is not None: data_obs = self._obs_file.get( self.var, level=self.level, region=self.region @@ -139,6 +141,7 @@ def get(self): else: data_obs = self._obs_file.get(self.var, region=self.region) return data_obs + """ except Exception as e: if self.level is not None: logging.getLogger("pcmdi_metrics").error( @@ -152,7 +155,7 @@ def get(self): "Failed opening 3D OBS", self.var, self.obs_or_model, e ) ) - + """ def hash(self): """Return a hash of the file.""" return self._obs_file.hash() diff --git a/pcmdi_metrics/mean_climate/lib/outputmetrics.py b/pcmdi_metrics/mean_climate/lib/outputmetrics.py index 0d7292039..513196c32 100644 --- a/pcmdi_metrics/mean_climate/lib/outputmetrics.py +++ b/pcmdi_metrics/mean_climate/lib/outputmetrics.py @@ -120,13 +120,20 @@ def calculate_and_output_metrics(self, ref, test): self.metrics_dictionary["References"][ref.obs_or_model] = self.obs_var_ref - try: + print('jwlee-test-calculate_and_output_metrics, self.obs_var_ref:', self.obs_var_ref) + + ref_data = None + + #try: + if 1: ref_data = ref() + """ except Exception as e: msg = "Error while processing observation %s for variables %s:\n\t%s" logging.getLogger("pcmdi_metrics").error( msg % (ref.obs_or_model, self.var, str(e)) ) + """ if ref_data is None: # Something went bad! raise RuntimeError("Could not load reference {}".format(ref.obs_or_model)) @@ -139,18 +146,34 @@ def calculate_and_output_metrics(self, ref, test): raise RuntimeError("Need to skip model: %s" % test.obs_or_model) # Todo: Make this a fcn - print('jwlee-test-2-1, test().shape:', test().shape) - print('jwlee-test-2-2, test_data.shape:', test_data.shape) - self.set_grid_in_metrics_dictionary(test_data) - print('jwlee-test-2-3, test_data.shape:', test_data.shape) - - if ref_data.shape != test_data.shape: + #print('jwlee-test-calculate_and_output_metrics, type(test):', type(test)) + #print('jwlee-test-2-1, test().shape:', test().shape) + #print('jwlee-test-2-2, test_data.shape:', test_data.shape) + print('jwlee-test-calculate_and_output_metrics, grid_in_metrics_dict start') + self.set_grid_in_metrics_dictionary(test_data, self.var) + print('jwlee-test-calculate_and_output_metrics, grid_in_metrics_dict done') + #print('jwlee-test-2-3, test_data.shape:', test_data.shape) + + print('jwlee-test type(ref_data), type(test_data):', type(ref_data), type(test_data)) + print('jwlee-test ref_data:', ref_data) + print('jwlee-test test_data:', test_data) + print('jwlee-test ref_data[self.var]:', ref_data[self.var]) + print('jwlee-test test_data[self.var]:', test_data[self.var]) + print('jwlee-test ref_data[self.var].shape:', ref_data[self.var].shape) + print('jwlee-test test_data[self.var].shape:', test_data[self.var].shape) + + #if ref_data.shape != test_data.shape: + if ref_data[self.var].shape != test_data[self.var].shape: + print('jwlee-test raise runtime error') raise RuntimeError( - "Two data sets have different shapes. 
%s vs %s" - % (ref_data.shape, test_data.shape) + "Two data sets have different shapes. {} vs {}".format( + str(ref_data[self.var].shape), str(test_data[self.var].shape)) + #% (ref_data.shape, test_data.shape) ) + print('jwlee-test-calculate_and_output_metrics, set_simulation_desc start') self.set_simulation_desc(test, test_data) + print('jwlee-test-calculate_and_output_metrics, set_simulation_desc done') if ( ref.obs_or_model @@ -165,9 +188,13 @@ def calculate_and_output_metrics(self, ref, test): ].get(self.parameter.realization, {}) if not self.parameter.dry_run: + print('jwlee-test-calculate_and_output_metrics, compute_metrics start') + print('jwlee-test-calculate_and_output_metrics, self.var_name_long:', self.var_name_long) + pr_rgn = compute_metrics( self.var_name_long, test_data, ref_data ) + print('jwlee-test-calculate_and_output_metrics, compute_metrics done') # Calling compute_metrics with None for the model and obs returns # the definitions. @@ -207,14 +234,20 @@ def calculate_and_output_metrics(self, ref, test): else: self.write_on_exit(False) - def set_grid_in_metrics_dictionary(self, test_data): + def set_grid_in_metrics_dictionary(self, test_data, var): """Set the grid in metrics_dictionary.""" + print('jwlee-test set_grid_in_metrics_dictionary start') grid = {} grid["RegridMethod"] = self.regrid_method grid["RegridTool"] = self.regrid_tool grid["GridName"] = self.parameter.target_grid - grid["GridResolution"] = test_data.shape[1:] + print('jwlee-test set_grid_in_metrics_dictionary middle') + print('jwlee-test var:', var) + #print('jwlee-test dir(test_data):', dir(test_data)) + #grid["GridResolution"] = test_data.shape[1:] + grid["GridResolution"] = test_data[var].shape[1:] self.metrics_dictionary["GridInfo"] = grid + print('jwlee-test set_grid_in_metrics_dictionary done') def set_simulation_desc(self, test, test_data): """Fillout information for the output .json and .txt files.""" diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index c809e8c1c..b9f2d857b 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-54-g0d68f61' -__git_sha1__ = '0d68f6167c812b579a978c007c825e46b20c8f33' +__git_tag_describe__ = 'v2.3.1-56-g4857338' +__git_sha1__ = '48573380a221a80d0763908a8454d3459e04757e' From de76499b8e1d5476c9af515971a1b0a1b3ee2f43 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Sun, 30 Oct 2022 11:05:08 -0700 Subject: [PATCH 047/130] conversion of metric calculations in progress --- pcmdi_metrics/mean_climate/lib/__init__.py | 1 - .../mean_climate/lib/compute_statistics.py | 78 +++++++-------- .../lib/mean_climate_metrics_calculations.py | 96 +++++++++---------- 3 files changed, 82 insertions(+), 93 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/__init__.py b/pcmdi_metrics/mean_climate/lib/__init__.py index 5675e0843..5d395052c 100644 --- a/pcmdi_metrics/mean_climate/lib/__init__.py +++ b/pcmdi_metrics/mean_climate/lib/__init__.py @@ -2,7 +2,6 @@ annual_mean, bias_xy, cor_xy, - cor_xyt, mean_xy, meanabs_xy, rms_0, diff --git a/pcmdi_metrics/mean_climate/lib/compute_statistics.py b/pcmdi_metrics/mean_climate/lib/compute_statistics.py index 3aec8aecf..08b23c48c 100644 --- a/pcmdi_metrics/mean_climate/lib/compute_statistics.py +++ b/pcmdi_metrics/mean_climate/lib/compute_statistics.py @@ -1,10 +1,11 @@ -import cdms2 +#import cdms2 import cdutil import genutil import MV2 +import xcdat -def annual_mean(dm, do): +def annual_mean(dm, do, var=None): """Computes ANNUAL MEAN""" if 
dm is None and do is None: # just want the doc return { @@ -15,12 +16,14 @@ def annual_mean(dm, do): "Contact": "pcmdi-metrics@llnl.gov", "Comments": "Assumes input are 12 months climatology", } - # Do we really want this? Wouldn't it better to let it fails - cdms2.setAutoBounds("on") - return cdutil.averager(dm, axis="t"), cdutil.averager(do, axis="t") + #cdms2.setAutoBounds("on") + #return cdutil.averager(dm, axis="t"), cdutil.averager(do, axis="t") + dm_am = dm.temporal.average(var) + do_am = do.temporal.average(var) + -def bias_xy(dm, do): +def bias_xy(dm, do, var=None): """Computes bias""" if dm is None and do is None: # just want the doc return { @@ -28,11 +31,12 @@ def bias_xy(dm, do): "Abstract": "Compute Full Average of Model - Observation", "Contact": "pcmdi-metrics@llnl.gov", } - dif = MV2.subtract(dm, do) - return MV2.float(cdutil.averager(dif, axis="xy", weights="weighted")) + dm['dif'] = dm[var] - do[var] + stat = dm.spatial.average('dif', axis=['X', 'Y'])['dif'].values + return float(stat) -def bias_xyt(dm, do): +def bias_xyt(dm, do, var=None): """Computes bias""" if dm is None and do is None: # just want the doc return { @@ -41,10 +45,12 @@ def bias_xyt(dm, do): "Contact": "pcmdi-metrics@llnl.gov", } dif = MV2.subtract(dm, do) - return MV2.float(cdutil.averager(dif, axis="xyt", weights="weighted")) + dm['dif'] = dm[var] - do[var] + stat = dm.spatial.average('dif', axis=['X', 'Y']).temporal.average('absdif')['absdif'].values + return float(stat) -def cor_xy(dm, do): +def cor_xy(dm, do, var=None): """Computes correlation""" if dm is None and do is None: # just want the doc return { @@ -57,20 +63,7 @@ def cor_xy(dm, do): return float(genutil.statistics.correlation(dm, do, axis="xy", weights="weighted")) -def cor_xyt(dm, do): - """Computes correlation""" - if dm is None and do is None: # just want the doc - return { - "Name": "Spatial and Temporal Correlation", - "Abstract": "Compute Spatio-Temporal Correlation", - "URI": "http://uvcdat.llnl.gov/documentation/utilities/" - + "utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - return float(genutil.statistics.correlation(dm, do, axis="xyt", weights="weighted")) - - -def mean_xy(d): +def mean_xy(d, var=None): """Computes bias""" if d is None: # just want the doc return { @@ -78,10 +71,11 @@ def mean_xy(d): "Abstract": "Area Mean (area weighted)", "Contact": "pcmdi-metrics@llnl.gov", } - return MV2.float(cdutil.averager(d, axis="xy", weights="weighted")) + mean_xy = d.spatial.average(var, axis=['X', 'Y']).values + return float(mean_xy) -def meanabs_xy(dm, do): +def meanabs_xy(dm, do, var=None): """Computes Mean Absolute Error""" if dm is None and do is None: # just want the doc return { @@ -90,12 +84,12 @@ def meanabs_xy(dm, do): + "Absolute Difference Between Model And Observation", "Contact": "pcmdi-metrics@llnl.gov", } - absdif = MV2.absolute(MV2.subtract(dm, do)) - mae = cdutil.averager(absdif, axis="xy", weights="weighted") - return float(mae) + dm['absdif'] = abs(dm[var] - do[var]) + stat = dm.spatial.average('absdif', axis=['X', 'Y'])['absdif'].values + return float(stat) -def meanabs_xyt(dm, do): +def meanabs_xyt(dm, do, var=None): """Computes Mean Absolute Error""" if dm is None and do is None: # just want the doc return { @@ -104,12 +98,12 @@ def meanabs_xyt(dm, do): + "Absolute Difference Between Model And Observation", "Contact": "pcmdi-metrics@llnl.gov", } - absdif = MV2.absolute(MV2.subtract(dm, do)) - mae = cdutil.averager(absdif, axis="xyt", weights="weighted") - return float(mae) + dm['absdif'] = 
abs(dm[var] - do[var]) + stat = dm.spatial.average('absdif', axis=['X', 'Y']).temporal.average('absdif')['absdif'].values + return float(stat) -def rms_0(dm, do): +def rms_0(dm, do, var=None): """Computes rms over first axis""" if dm is None and do is None: # just want the doc return { @@ -125,7 +119,7 @@ def rms_0(dm, do): return float(genutil.statistics.rms(dm, do)) -def rms_xy(dm, do): +def rms_xy(dm, do, var=None): """Computes rms""" if dm is None and do is None: # just want the doc return { @@ -138,7 +132,7 @@ def rms_xy(dm, do): return float(genutil.statistics.rms(dm, do, axis="xy", weights="weighted")) -def rms_xyt(dm, do): +def rms_xyt(dm, do, var=None): """Computes rms""" if dm is None and do is None: # just want the doc return { @@ -151,7 +145,7 @@ def rms_xyt(dm, do): return float(genutil.statistics.rms(dm, do, axis="xyt", weights="weighted")) -def rmsc_xy(dm, do): +def rmsc_xy(dm, do, var=None): """Computes centered rms""" if dm is None and do is None: # just want the doc return { @@ -166,7 +160,7 @@ def rmsc_xy(dm, do): ) -def seasonal_mean(d, sea): +def seasonal_mean(d, sea, var=None): """Computes SEASONAL MEAN""" if d is None and sea is None: # just want the doc return { @@ -198,7 +192,7 @@ def seasonal_mean(d, sea): return d_sea -def std_xy(d): +def std_xy(d, var=None): """Computes std""" if d is None: # just want the doc return { @@ -211,7 +205,7 @@ def std_xy(d): return float(genutil.statistics.std(d, axis="xy", weights="weighted")) -def std_xyt(d): +def std_xyt(d, var=None): """Computes std""" if d is None: # just want the doc return { @@ -224,7 +218,7 @@ def std_xyt(d): return float(genutil.statistics.std(d, axis="xyt", weights="weighted")) -def zonal_mean(dm, do): +def zonal_mean(dm, do, var=None): """Computes ZONAL MEAN assumes rectilinear/regular grid""" if dm is None and do is None: # just want the doc return { diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py index 1e262f08b..590d48b03 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py @@ -1,7 +1,7 @@ -import cdms2 as cdms -import MV2 -from genutil import grower +#import cdms2 as cdms +#import MV2 +#from genutil import grower import pcmdi_metrics import collections @@ -18,9 +18,6 @@ def compute_metrics(Var, dm, do): metrics_defs["rmsc_xy"] = pcmdi_metrics.mean_climate.lib.rmsc_xy(None, None) metrics_defs["bias_xy"] = pcmdi_metrics.mean_climate.lib.bias_xy(None, None) metrics_defs["mae_xy"] = pcmdi_metrics.mean_climate.lib.meanabs_xy(None, None) - # metrics_defs["cor_xyt"] = pcmdi_metrics.mean_climate.lib.cor_xyt( - # None, - # None) metrics_defs["cor_xy"] = pcmdi_metrics.mean_climate.lib.cor_xy(None, None) metrics_defs["mean_xy"] = pcmdi_metrics.mean_climate.lib.mean_xy(None) metrics_defs["std_xy"] = pcmdi_metrics.mean_climate.lib.std_xy(None) @@ -34,7 +31,7 @@ def compute_metrics(Var, dm, do): ) metrics_defs["zonal_mean"] = pcmdi_metrics.mean_climate.lib.zonal_mean(None, None) return metrics_defs - cdms.setAutoBounds("on") + #cdms.setAutoBounds("on") metrics_dictionary = {} # SET CONDITIONAL ON INPUT VARIABLE @@ -49,54 +46,53 @@ def compute_metrics(Var, dm, do): sig_digits = ".3f" # CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD - rms_xyt = pcmdi_metrics.mean_climate.lib.rms_xyt(dm, do) - # cor_xyt = pcmdi_metrics.mean_climate.lib.cor_xyt(dm, do) - stdObs_xyt = 
pcmdi_metrics.mean_climate.lib.std_xyt(do) - std_xyt = pcmdi_metrics.mean_climate.lib.std_xyt(dm) + rms_xyt = pcmdi_metrics.mean_climate.lib.rms_xyt(dm, do, var) + stdObs_xyt = pcmdi_metrics.mean_climate.lib.std_xyt(do, var) + std_xyt = pcmdi_metrics.mean_climate.lib.std_xyt(dm, var) # CALCULATE ANNUAL MEANS - dm_am, do_am = pcmdi_metrics.mean_climate.lib.annual_mean(dm, do) + dm_am, do_am = pcmdi_metrics.mean_climate.lib.annual_mean(dm, do, var) # CALCULATE ANNUAL MEAN BIAS - bias_xy = pcmdi_metrics.mean_climate.lib.bias_xy(dm_am, do_am) + bias_xy = pcmdi_metrics.mean_climate.lib.bias_xy(dm_am, do_am, var) # CALCULATE MEAN ABSOLUTE ERROR - mae_xy = pcmdi_metrics.mean_climate.lib.meanabs_xy(dm_am, do_am) + mae_xy = pcmdi_metrics.mean_climate.lib.meanabs_xy(dm_am, do_am, var) # CALCULATE ANNUAL MEAN RMS (centered and uncentered) - rms_xy = pcmdi_metrics.mean_climate.lib.rms_xy(dm_am, do_am) - rmsc_xy = pcmdi_metrics.mean_climate.lib.rmsc_xy(dm_am, do_am) + rms_xy = pcmdi_metrics.mean_climate.lib.rms_xy(dm_am, do_am, var) + rmsc_xy = pcmdi_metrics.mean_climate.lib.rmsc_xy(dm_am, do_am, var) # CALCULATE ANNUAL MEAN CORRELATION - cor_xy = pcmdi_metrics.mean_climate.lib.cor_xy(dm_am, do_am) + cor_xy = pcmdi_metrics.mean_climate.lib.cor_xy(dm_am, do_am, var) # CALCULATE ANNUAL OBS and MOD STD - stdObs_xy = pcmdi_metrics.mean_climate.lib.std_xy(do_am) - std_xy = pcmdi_metrics.mean_climate.lib.std_xy(dm_am) + stdObs_xy = pcmdi_metrics.mean_climate.lib.std_xy(do_am, var) + std_xy = pcmdi_metrics.mean_climate.lib.std_xy(dm_am, var) # CALCULATE ANNUAL OBS and MOD MEAN - meanObs_xy = pcmdi_metrics.mean_climate.lib.mean_xy(do_am) - mean_xy = pcmdi_metrics.mean_climate.lib.mean_xy(dm_am) + meanObs_xy = pcmdi_metrics.mean_climate.lib.mean_xy(do_am, var) + mean_xy = pcmdi_metrics.mean_climate.lib.mean_xy(dm_am, var) # ZONAL MEANS ###### # CALCULATE ANNUAL MEANS - dm_amzm, do_amzm = pcmdi_metrics.mean_climate.lib.zonal_mean(dm_am, do_am) + dm_amzm, do_amzm = pcmdi_metrics.mean_climate.lib.zonal_mean(dm_am, do_am, var) # CALCULATE ANNUAL AND ZONAL MEAN RMS - rms_y = pcmdi_metrics.mean_climate.lib.rms_0(dm_amzm, do_amzm) + rms_y = pcmdi_metrics.mean_climate.lib.rms_0(dm_amzm, do_amzm, var) # CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS - dm_amzm_grown, dummy = grower(dm_amzm, dm_am) - dm_am_devzm = MV2.subtract(dm_am, dm_amzm_grown) - do_amzm_grown, dummy = grower(do_amzm, do_am) - do_am_devzm = MV2.subtract(do_am, do_amzm_grown) - rms_xy_devzm = pcmdi_metrics.mean_climate.lib.rms_xy(dm_am_devzm, do_am_devzm) + dm_amzm_grown, dummy = grower(dm_amzm, dm_am, var) + dm_am_devzm = MV2.subtract(dm_am, dm_amzm_grown, var) + do_amzm_grown, dummy = grower(do_amzm, do_am, var) + do_am_devzm = MV2.subtract(do_am, do_amzm_grown, var) + rms_xy_devzm = pcmdi_metrics.mean_climate.lib.rms_xy(dm_am_devzm, do_am_devzm, var) # CALCULATE ANNUAL AND ZONAL MEAN STD # CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD - stdObs_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(do_am_devzm) - std_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(dm_am_devzm) + stdObs_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(do_am_devzm, var) + std_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(dm_am_devzm, var) for stat in [ "std-obs_xy", @@ -141,23 +137,23 @@ def compute_metrics(Var, dm, do): # CALCULATE SEASONAL MEANS for sea in ["djf", "mam", "jja", "son"]: - dm_sea = pcmdi_metrics.mean_climate.lib.seasonal_mean(dm, sea) - do_sea = pcmdi_metrics.mean_climate.lib.seasonal_mean(do, sea) + dm_sea = 
pcmdi_metrics.mean_climate.lib.seasonal_mean(dm, sea, var) + do_sea = pcmdi_metrics.mean_climate.lib.seasonal_mean(do, sea, var) # CALCULATE SEASONAL RMS AND CORRELATION - rms_sea = pcmdi_metrics.mean_climate.lib.rms_xy(dm_sea, do_sea) - rmsc_sea = pcmdi_metrics.mean_climate.lib.rmsc_xy(dm_sea, do_sea) - cor_sea = pcmdi_metrics.mean_climate.lib.cor_xy(dm_sea, do_sea) - mae_sea = pcmdi_metrics.mean_climate.lib.meanabs_xy(dm_sea, do_sea) - bias_sea = pcmdi_metrics.mean_climate.lib.bias_xy(dm_sea, do_sea) + rms_sea = pcmdi_metrics.mean_climate.lib.rms_xy(dm_sea, do_sea, var) + rmsc_sea = pcmdi_metrics.mean_climate.lib.rmsc_xy(dm_sea, do_sea, var) + cor_sea = pcmdi_metrics.mean_climate.lib.cor_xy(dm_sea, do_sea, var) + mae_sea = pcmdi_metrics.mean_climate.lib.meanabs_xy(dm_sea, do_sea, var) + bias_sea = pcmdi_metrics.mean_climate.lib.bias_xy(dm_sea, do_sea, var) # CALCULATE SEASONAL OBS and MOD STD - stdObs_xy_sea = pcmdi_metrics.mean_climate.lib.std_xy(do_sea) - std_xy_sea = pcmdi_metrics.mean_climate.lib.std_xy(dm_sea) + stdObs_xy_sea = pcmdi_metrics.mean_climate.lib.std_xy(do_sea, var) + std_xy_sea = pcmdi_metrics.mean_climate.lib.std_xy(dm_sea, var) # CALCULATE SEASONAL OBS and MOD MEAN - meanObs_xy_sea = pcmdi_metrics.mean_climate.lib.mean_xy(do_sea) - mean_xy_sea = pcmdi_metrics.mean_climate.lib.mean_xy(dm_sea) + meanObs_xy_sea = pcmdi_metrics.mean_climate.lib.mean_xy(do_sea, var) + mean_xy_sea = pcmdi_metrics.mean_climate.lib.mean_xy(dm_sea, var) metrics_dictionary["bias_xy"][sea] = format(bias_sea * conv, sig_digits) metrics_dictionary["rms_xy"][sea] = format(rms_sea * conv, sig_digits) @@ -201,19 +197,19 @@ def compute_metrics(Var, dm, do): do_mo = do[n] # CALCULATE MONTHLY RMS AND CORRELATION - rms_mo = pcmdi_metrics.mean_climate.lib.rms_xy(dm_mo, do_mo) - rmsc_mo = pcmdi_metrics.mean_climate.lib.rmsc_xy(dm_mo, do_mo) - cor_mo = pcmdi_metrics.mean_climate.lib.cor_xy(dm_mo, do_mo) - mae_mo = pcmdi_metrics.mean_climate.lib.meanabs_xy(dm_mo, do_mo) - bias_mo = pcmdi_metrics.mean_climate.lib.bias_xy(dm_mo, do_mo) + rms_mo = pcmdi_metrics.mean_climate.lib.rms_xy(dm_mo, do_mo, var) + rmsc_mo = pcmdi_metrics.mean_climate.lib.rmsc_xy(dm_mo, do_mo, var) + cor_mo = pcmdi_metrics.mean_climate.lib.cor_xy(dm_mo, do_mo, var) + mae_mo = pcmdi_metrics.mean_climate.lib.meanabs_xy(dm_mo, do_mo, var) + bias_mo = pcmdi_metrics.mean_climate.lib.bias_xy(dm_mo, do_mo, var) # CALCULATE MONTHLY OBS and MOD STD - stdObs_xy_mo = pcmdi_metrics.mean_climate.lib.std_xy(do_mo) - std_xy_mo = pcmdi_metrics.mean_climate.lib.std_xy(dm_mo) + stdObs_xy_mo = pcmdi_metrics.mean_climate.lib.std_xy(do_mo, var) + std_xy_mo = pcmdi_metrics.mean_climate.lib.std_xy(dm_mo, var) # CALCULATE MONTHLY OBS and MOD MEAN - meanObs_xy_mo = pcmdi_metrics.mean_climate.lib.mean_xy(do_mo) - mean_xy_mo = pcmdi_metrics.mean_climate.lib.mean_xy(dm_mo) + meanObs_xy_mo = pcmdi_metrics.mean_climate.lib.mean_xy(do_mo, var) + mean_xy_mo = pcmdi_metrics.mean_climate.lib.mean_xy(dm_mo, var) rms_mo_l.append(format(rms_mo * conv, sig_digits)) rmsc_mo_l.append(format(rmsc_mo * conv, sig_digits)) From 3f94d4d4632ca1db5d1091632d5ba5b4c3b89246 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Tue, 1 Nov 2022 15:20:37 -0700 Subject: [PATCH 048/130] xcdat transition in progress --- .../mean_climate/lib/compute_statistics.py | 99 +++++++++---------- 1 file changed, 48 insertions(+), 51 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/compute_statistics.py b/pcmdi_metrics/mean_climate/lib/compute_statistics.py index 08b23c48c..0b2cf50f4 100644 --- 
a/pcmdi_metrics/mean_climate/lib/compute_statistics.py +++ b/pcmdi_metrics/mean_climate/lib/compute_statistics.py @@ -1,8 +1,10 @@ -#import cdms2 import cdutil import genutil import MV2 import xcdat +import xskillscore as xs +import math +import numpy as np def annual_mean(dm, do, var=None): @@ -11,16 +13,12 @@ def annual_mean(dm, do, var=None): return { "Name": "Annual Mean", "Abstract": "Compute Annual Mean", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", "Contact": "pcmdi-metrics@llnl.gov", "Comments": "Assumes input are 12 months climatology", } - #cdms2.setAutoBounds("on") - #return cdutil.averager(dm, axis="t"), cdutil.averager(do, axis="t") dm_am = dm.temporal.average(var) do_am = do.temporal.average(var) - + return dm_am, do_am def bias_xy(dm, do, var=None): @@ -44,9 +42,8 @@ def bias_xyt(dm, do, var=None): "Abstract": "Compute Full Average of Model - Observation", "Contact": "pcmdi-metrics@llnl.gov", } - dif = MV2.subtract(dm, do) dm['dif'] = dm[var] - do[var] - stat = dm.spatial.average('dif', axis=['X', 'Y']).temporal.average('absdif')['absdif'].values + stat = dm.spatial.average('dif', axis=['X', 'Y']).temporal.average('dif')['dif'].values return float(stat) @@ -56,11 +53,11 @@ def cor_xy(dm, do, var=None): return { "Name": "Spatial Correlation", "Abstract": "Compute Spatial Correlation", - "URI": "http://uvcdat.llnl.gov/documentation/utilities/" - + "utilities-2.html", "Contact": "pcmdi-metrics@llnl.gov", } - return float(genutil.statistics.correlation(dm, do, axis="xy", weights="weighted")) + spatial_weights = dm.spatial.get_weights(axis=['X', 'Y']) + stat = xs.pearson_r(dm[var], do[var], weights=spatial_weights).values + return float(stat) def mean_xy(d, var=None): @@ -71,8 +68,8 @@ def mean_xy(d, var=None): "Abstract": "Area Mean (area weighted)", "Contact": "pcmdi-metrics@llnl.gov", } - mean_xy = d.spatial.average(var, axis=['X', 'Y']).values - return float(mean_xy) + stat = d.spatial.average(var, axis=['X', 'Y'])[var].values + return float(stat) def meanabs_xy(dm, do, var=None): @@ -109,14 +106,11 @@ def rms_0(dm, do, var=None): return { "Name": "Root Mean Square over First Axis", "Abstract": "Compute Root Mean Square over the first axis", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", "Contact": "pcmdi-metrics@llnl.gov", } - if 1 in [x.isLevel() for x in dm.getAxisList()]: - dm = dm(squeeze=1) - do = do(squeeze=1) - return float(genutil.statistics.rms(dm, do)) + dm['diff_square'] = (dm[var] - do[var])**2 + stat = math.sqrt(dm.spatial.average('diff_square', axis=['X', 'Y'])['diff_square'].values) + return float(stat) def rms_xy(dm, do, var=None): @@ -125,11 +119,11 @@ def rms_xy(dm, do, var=None): return { "Name": "Spatial Root Mean Square", "Abstract": "Compute Spatial Root Mean Square", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", "Contact": "pcmdi-metrics@llnl.gov", } - return float(genutil.statistics.rms(dm, do, axis="xy", weights="weighted")) + dm['diff_square'] = (dm[var] - do[var])**2 + stat = math.sqrt(dm.spatial.average('diff_square', axis=['X', 'Y'])['diff_square'].values) + return float(stat) def rms_xyt(dm, do, var=None): @@ -138,11 +132,12 @@ def rms_xyt(dm, do, var=None): return { "Name": "Spatio-Temporal Root Mean Square", "Abstract": "Compute Spatial and Temporal Root Mean Square", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", "Contact": "pcmdi-metrics@llnl.gov", } - return float(genutil.statistics.rms(dm, do, 
axis="xyt", weights="weighted")) + dm['diff_square'] = (dm[var] - do[var])**2 + dm['diff_square_sqrt'] = np.sqrt(dm.spatial.average('diff_square', axis=['X', 'Y'])['diff_square']) + stat = dm.temporal.average('diff_square_sqrt')['diff_square_sqrt'].values + return float(stat) def rmsc_xy(dm, do, var=None): @@ -151,18 +146,18 @@ def rmsc_xy(dm, do, var=None): return { "Name": "Spatial Root Mean Square", "Abstract": "Compute Centered Spatial Root Mean Square", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", "Contact": "pcmdi-metrics@llnl.gov", } - return float( - genutil.statistics.rms(dm, do, axis="xy", centered=1, weights="weighted") - ) + dm['anomaly'] = dm[var] - dm.spatial.average(var, axis=['X', 'Y'])[var] + do['anomaly'] = do[var] - do.spatial.average(var, axis=['X', 'Y'])[var] + dm['diff_square'] = (dm['anomaly'] - do['anomaly'])**2 + stat = math.sqrt(dm.spatial.average('diff_square', axis=['X', 'Y'])['diff_square'].values) + return float(stat) def seasonal_mean(d, sea, var=None): """Computes SEASONAL MEAN""" - if d is None and sea is None: # just want the doc + if d is None and season is None: # just want the doc return { "Name": "Seasonal Mean", "Abstract": "Compute Seasonal Mean", @@ -172,24 +167,24 @@ def seasonal_mean(d, sea, var=None): mo_wts = [31, 31, 28.25, 31, 30, 31, 30, 31, 31, 30, 31, 30] - if sea == "djf": + if season == "djf": indx = [11, 0, 1] - if sea == "mam": + if season == "mam": indx = [2, 3, 4] - if sea == "jja": + if season == "jja": indx = [5, 6, 7] - if sea == "son": + if season == "son": indx = [8, 9, 10] - sea_no_days = mo_wts[indx[0]] + mo_wts[indx[1]] + mo_wts[indx[2]] - - d_sea = ( - d[indx[0]] * mo_wts[indx[0]] - + d[indx[1]] * mo_wts[indx[1]] - + d[indx[2]] * mo_wts[indx[2]] - ) / sea_no_days + season_num_days = mo_wts[indx[0]] + mo_wts[indx[1]] + mo_wts[indx[2]] + + d_season = ( + d.isel(time=indx[0])[var] * mo_wts[indx[0]] + + d.isel(time=indx[1])[var] * mo_wts[indx[1]] + + d.isel(time=indx[2])[var] * mo_wts[indx[2]] + ) / season_num_days - return d_sea + return d_season def std_xy(d, var=None): @@ -198,11 +193,13 @@ def std_xy(d, var=None): return { "Name": "Spatial Standard Deviation", "Abstract": "Compute Spatial Standard Deviation", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", "Contact": "pcmdi-metrics@llnl.gov", } - return float(genutil.statistics.std(d, axis="xy", weights="weighted")) + average = float(d.spatial.average(var, axis=['X', 'Y'])[var].values) + d['anomaly'] = (d[var] - average)**2 + variance = float(d.spatial.average('anomaly')['anomaly'].values) + std = math.sqrt(variance) + return(std) def std_xyt(d, var=None): @@ -211,11 +208,13 @@ def std_xyt(d, var=None): return { "Name": "Spatial-temporal Standard Deviation", "Abstract": "Compute Space-Time Standard Deviation", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", "Contact": "pcmdi-metrics@llnl.gov", } - return float(genutil.statistics.std(d, axis="xyt", weights="weighted")) + average = float(d.spatial.average(var, axis=['X', 'Y']).temporal.average(var)[var].values) + d['anomaly'] = (d[var] - average)**2 + variance = float(d.spatial.average('anomaly').temporal.average('anomaly')['anomaly'].values) + std = math.sqrt(variance) + return(std) def zonal_mean(dm, do, var=None): @@ -224,8 +223,6 @@ def zonal_mean(dm, do, var=None): return { "Name": "Zonal Mean", "Abstract": "Compute Zonal Mean", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", 
"Contact": "pcmdi-metrics@llnl.gov", "Comments": "", } From 1b0a585f89e33f1d6004ad93c6379c592c5649d8 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Wed, 2 Nov 2022 18:49:43 -0700 Subject: [PATCH 049/130] metrics calculations using xcdat --- pcmdi_metrics/mean_climate/lib/compute_statistics.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/compute_statistics.py b/pcmdi_metrics/mean_climate/lib/compute_statistics.py index 0b2cf50f4..1fbb681dc 100644 --- a/pcmdi_metrics/mean_climate/lib/compute_statistics.py +++ b/pcmdi_metrics/mean_climate/lib/compute_statistics.py @@ -18,7 +18,7 @@ def annual_mean(dm, do, var=None): } dm_am = dm.temporal.average(var) do_am = do.temporal.average(var) - return dm_am, do_am + return dm_am, do_am # DataSets def bias_xy(dm, do, var=None): @@ -226,4 +226,6 @@ def zonal_mean(dm, do, var=None): "Contact": "pcmdi-metrics@llnl.gov", "Comments": "", } - return cdutil.averager(dm, axis="x"), cdutil.averager(do, axis="x") + dm_zm = dm.spatial.average(var, axis=['X']) + do_zm = do.spatial.average(var, axis=['X']) + return dm_zm, do_zm # DataSets \ No newline at end of file From 143f845517e6eed31cbbceab423929b734f56268 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Wed, 2 Nov 2022 20:04:30 -0700 Subject: [PATCH 050/130] update correlation calculation -- use only xcdat/xarray and exclude xskillscore to minimize dependency --- pcmdi_metrics/mean_climate/lib/compute_statistics.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/compute_statistics.py b/pcmdi_metrics/mean_climate/lib/compute_statistics.py index 1fbb681dc..afc58b467 100644 --- a/pcmdi_metrics/mean_climate/lib/compute_statistics.py +++ b/pcmdi_metrics/mean_climate/lib/compute_statistics.py @@ -1,8 +1,4 @@ -import cdutil -import genutil -import MV2 import xcdat -import xskillscore as xs import math import numpy as np @@ -56,7 +52,13 @@ def cor_xy(dm, do, var=None): "Contact": "pcmdi-metrics@llnl.gov", } spatial_weights = dm.spatial.get_weights(axis=['X', 'Y']) - stat = xs.pearson_r(dm[var], do[var], weights=spatial_weights).values + dm_avg = float(dm.spatial.average(var, axis=['X', 'Y'])[var].values) + do_avg = float(do.spatial.average(var, axis=['X', 'Y'])[var].values) + + covariance = float(((dm[var] - dm_avg) * (do[var] - do_avg)).cf.weighted(spatial_weights).mean(dim=['lon', 'lat']).values) + std_dm = std_xy(dm, var) + std_do = std_xy(do, var) + stat = covariance / (std_dm * std_do) return float(stat) From c431bd885b2bb0e0eeb104f646a9292e23ff71bd Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Fri, 4 Nov 2022 21:18:37 -0700 Subject: [PATCH 051/130] more efficient calculation --- .../mean_climate/lib/compute_statistics.py | 161 ++++++++++-------- 1 file changed, 93 insertions(+), 68 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/compute_statistics.py b/pcmdi_metrics/mean_climate/lib/compute_statistics.py index afc58b467..438ab73be 100644 --- a/pcmdi_metrics/mean_climate/lib/compute_statistics.py +++ b/pcmdi_metrics/mean_climate/lib/compute_statistics.py @@ -17,7 +17,42 @@ def annual_mean(dm, do, var=None): return dm_am, do_am # DataSets -def bias_xy(dm, do, var=None): +def seasonal_mean(d, sea, var=None): + """Computes SEASONAL MEAN""" + if d is None and season is None: # just want the doc + return { + "Name": "Seasonal Mean", + "Abstract": "Compute Seasonal Mean", + "Contact": "pcmdi-metrics@llnl.gov", + "Comments": "Assumes input are 12 months climatology", + } + + mo_wts = [31, 31, 
28.25, 31, 30, 31, 30, 31, 31, 30, 31, 30] + + if season == "djf": + indx = [11, 0, 1] + if season == "mam": + indx = [2, 3, 4] + if season == "jja": + indx = [5, 6, 7] + if season == "son": + indx = [8, 9, 10] + + season_num_days = mo_wts[indx[0]] + mo_wts[indx[1]] + mo_wts[indx[2]] + + d_season = ( + d.isel(time=indx[0])[var] * mo_wts[indx[0]] + + d.isel(time=indx[1])[var] * mo_wts[indx[1]] + + d.isel(time=indx[2])[var] * mo_wts[indx[2]] + ) / season_num_days + + return d_season + + +# Metrics calculations + + +def bias_xy(dm, do, var=None, weights=None): """Computes bias""" if dm is None and do is None: # just want the doc return { @@ -25,8 +60,10 @@ def bias_xy(dm, do, var=None): "Abstract": "Compute Full Average of Model - Observation", "Contact": "pcmdi-metrics@llnl.gov", } - dm['dif'] = dm[var] - do[var] - stat = dm.spatial.average('dif', axis=['X', 'Y'])['dif'].values + dif = dm[var] - do[var] + if weights is None: + weights = dm.spatial.get_weights(axis=['X', 'Y']) + stat = float(dif.weighted(weights).mean(("lon", "lat"))) return float(stat) @@ -38,12 +75,13 @@ def bias_xyt(dm, do, var=None): "Abstract": "Compute Full Average of Model - Observation", "Contact": "pcmdi-metrics@llnl.gov", } - dm['dif'] = dm[var] - do[var] - stat = dm.spatial.average('dif', axis=['X', 'Y']).temporal.average('dif')['dif'].values + ds = dm.copy(deep=True) + ds['dif'] = dm[var] - do[var] + stat = ds.spatial.average('dif', axis=['X', 'Y']).temporal.average('dif')['dif'].values return float(stat) -def cor_xy(dm, do, var=None): +def cor_xy(dm, do, var=None, weights=None): """Computes correlation""" if dm is None and do is None: # just want the doc return { @@ -51,18 +89,21 @@ def cor_xy(dm, do, var=None): "Abstract": "Compute Spatial Correlation", "Contact": "pcmdi-metrics@llnl.gov", } - spatial_weights = dm.spatial.get_weights(axis=['X', 'Y']) - dm_avg = float(dm.spatial.average(var, axis=['X', 'Y'])[var].values) - do_avg = float(do.spatial.average(var, axis=['X', 'Y'])[var].values) + if weights is None: + weights = dm.spatial.get_weights(axis=['X', 'Y']) + + dm_avg = dm.spatial.average(var, axis=['X', 'Y'], weights=weights)[var].values + do_avg = do.spatial.average(var, axis=['X', 'Y'], weights=weights)[var].values - covariance = float(((dm[var] - dm_avg) * (do[var] - do_avg)).cf.weighted(spatial_weights).mean(dim=['lon', 'lat']).values) + covariance = ((dm[var] - dm_avg) * (do[var] - do_avg)).weighted(weights).mean(dim=['lon', 'lat']).values std_dm = std_xy(dm, var) std_do = std_xy(do, var) stat = covariance / (std_dm * std_do) + return float(stat) -def mean_xy(d, var=None): +def mean_xy(d, var=None, weights=None): """Computes bias""" if d is None: # just want the doc return { @@ -70,11 +111,14 @@ def mean_xy(d, var=None): "Abstract": "Area Mean (area weighted)", "Contact": "pcmdi-metrics@llnl.gov", } - stat = d.spatial.average(var, axis=['X', 'Y'])[var].values + + if weights is None: + weights = d.spatial.get_weights(axis=['X', 'Y']) + stat = float(d[var].weighted(weights).mean(("lon", "lat"))) return float(stat) -def meanabs_xy(dm, do, var=None): +def meanabs_xy(dm, do, var=None, weights=None): """Computes Mean Absolute Error""" if dm is None and do is None: # just want the doc return { @@ -83,8 +127,10 @@ def meanabs_xy(dm, do, var=None): + "Absolute Difference Between Model And Observation", "Contact": "pcmdi-metrics@llnl.gov", } - dm['absdif'] = abs(dm[var] - do[var]) - stat = dm.spatial.average('absdif', axis=['X', 'Y'])['absdif'].values + if weights is None: + weights = 
dm.spatial.get_weights(axis=['X', 'Y']) + dif = abs(dm[var] - do[var]) + stat = dif.weighted(weights).mean(("lon", "lat")) return float(stat) @@ -97,8 +143,9 @@ def meanabs_xyt(dm, do, var=None): + "Absolute Difference Between Model And Observation", "Contact": "pcmdi-metrics@llnl.gov", } - dm['absdif'] = abs(dm[var] - do[var]) - stat = dm.spatial.average('absdif', axis=['X', 'Y']).temporal.average('absdif')['absdif'].values + ds = dm.copy(deep=True) + ds['absdif'] = abs(dm[var] - do[var]) + stat = ds.spatial.average('absdif', axis=['X', 'Y']).temporal.average('absdif')['absdif'].values return float(stat) @@ -111,7 +158,7 @@ def rms_0(dm, do, var=None): "Contact": "pcmdi-metrics@llnl.gov", } dm['diff_square'] = (dm[var] - do[var])**2 - stat = math.sqrt(dm.spatial.average('diff_square', axis=['X', 'Y'])['diff_square'].values) + stat = math.sqrt(dm.spatial.average('diff_square', axis=['Y'])['diff_square'].values) return float(stat) @@ -123,8 +170,9 @@ def rms_xy(dm, do, var=None): "Abstract": "Compute Spatial Root Mean Square", "Contact": "pcmdi-metrics@llnl.gov", } - dm['diff_square'] = (dm[var] - do[var])**2 - stat = math.sqrt(dm.spatial.average('diff_square', axis=['X', 'Y'])['diff_square'].values) + dif_square = (dm[var] - do[var])**2 + weights = dm.spatial.get_weights(axis=['X', 'Y']) + stat = math.sqrt(dif_square.weighted(weights).mean(("lon", "lat"))) return float(stat) @@ -136,13 +184,14 @@ def rms_xyt(dm, do, var=None): "Abstract": "Compute Spatial and Temporal Root Mean Square", "Contact": "pcmdi-metrics@llnl.gov", } - dm['diff_square'] = (dm[var] - do[var])**2 - dm['diff_square_sqrt'] = np.sqrt(dm.spatial.average('diff_square', axis=['X', 'Y'])['diff_square']) - stat = dm.temporal.average('diff_square_sqrt')['diff_square_sqrt'].values + ds = dm.copy(deep=True) + ds['diff_square'] = (dm[var] - do[var])**2 + ds['diff_square_sqrt'] = np.sqrt(ds.spatial.average('diff_square', axis=['X', 'Y'])['diff_square']) + stat = ds.temporal.average('diff_square_sqrt')['diff_square_sqrt'].values return float(stat) -def rmsc_xy(dm, do, var=None): +def rmsc_xy(dm, do, var=None, weights=None): """Computes centered rms""" if dm is None and do is None: # just want the doc return { @@ -150,58 +199,33 @@ def rmsc_xy(dm, do, var=None): "Abstract": "Compute Centered Spatial Root Mean Square", "Contact": "pcmdi-metrics@llnl.gov", } - dm['anomaly'] = dm[var] - dm.spatial.average(var, axis=['X', 'Y'])[var] - do['anomaly'] = do[var] - do.spatial.average(var, axis=['X', 'Y'])[var] - dm['diff_square'] = (dm['anomaly'] - do['anomaly'])**2 - stat = math.sqrt(dm.spatial.average('diff_square', axis=['X', 'Y'])['diff_square'].values) + if weights is None: + weights = dm.spatial.get_weights(axis=['X', 'Y']) + + dm_anomaly = dm[var] - dm[var].weighted(weights).mean(("lon", "lat")) + do_anomaly = do[var] - do[var].weighted(weights).mean(("lon", "lat")) + diff_square = (dm_anomaly - do_anomaly)**2 + + stat = math.sqrt(diff_square.weighted(weights).mean(("lon", "lat"))) return float(stat) -def seasonal_mean(d, sea, var=None): - """Computes SEASONAL MEAN""" - if d is None and season is None: # just want the doc - return { - "Name": "Seasonal Mean", - "Abstract": "Compute Seasonal Mean", - "Contact": "pcmdi-metrics@llnl.gov", - "Comments": "Assumes input are 12 months climatology", - } - - mo_wts = [31, 31, 28.25, 31, 30, 31, 30, 31, 31, 30, 31, 30] - - if season == "djf": - indx = [11, 0, 1] - if season == "mam": - indx = [2, 3, 4] - if season == "jja": - indx = [5, 6, 7] - if season == "son": - indx = [8, 9, 10] - - 
season_num_days = mo_wts[indx[0]] + mo_wts[indx[1]] + mo_wts[indx[2]] - - d_season = ( - d.isel(time=indx[0])[var] * mo_wts[indx[0]] - + d.isel(time=indx[1])[var] * mo_wts[indx[1]] - + d.isel(time=indx[2])[var] * mo_wts[indx[2]] - ) / season_num_days - - return d_season - - -def std_xy(d, var=None): +def std_xy(d, var=None, weights=None): """Computes std""" if d is None: # just want the doc return { "Name": "Spatial Standard Deviation", "Abstract": "Compute Spatial Standard Deviation", "Contact": "pcmdi-metrics@llnl.gov", - } + } + average = float(d.spatial.average(var, axis=['X', 'Y'])[var].values) - d['anomaly'] = (d[var] - average)**2 - variance = float(d.spatial.average('anomaly')['anomaly'].values) + anomaly = (d[var] - average)**2 + if weights is None: + weights = d.spatial.get_weights(axis=['X', 'Y']) + variance = float(anomaly.weighted(weights).mean(("lon", "lat"))) std = math.sqrt(variance) - return(std) + return float(std) def std_xyt(d, var=None): @@ -212,9 +236,10 @@ def std_xyt(d, var=None): "Abstract": "Compute Space-Time Standard Deviation", "Contact": "pcmdi-metrics@llnl.gov", } - average = float(d.spatial.average(var, axis=['X', 'Y']).temporal.average(var)[var].values) - d['anomaly'] = (d[var] - average)**2 - variance = float(d.spatial.average('anomaly').temporal.average('anomaly')['anomaly'].values) + ds = d.copy(deep=True) + average = d.spatial.average(var, axis=['X', 'Y']).temporal.average(var)[var] + ds['anomaly'] = (d[var] - average)**2 + variance = ds.spatial.average('anomaly').temporal.average('anomaly')['anomaly'].values std = math.sqrt(variance) return(std) From ad9406b91f00abfa1b7c2d10f3de514f854f5583 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Sun, 6 Nov 2022 20:34:42 -0800 Subject: [PATCH 052/130] rms_0 clarification added --- pcmdi_metrics/mean_climate/lib/compute_statistics.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/compute_statistics.py b/pcmdi_metrics/mean_climate/lib/compute_statistics.py index 438ab73be..a7eb4dfb5 100644 --- a/pcmdi_metrics/mean_climate/lib/compute_statistics.py +++ b/pcmdi_metrics/mean_climate/lib/compute_statistics.py @@ -149,16 +149,20 @@ def meanabs_xyt(dm, do, var=None): return float(stat) -def rms_0(dm, do, var=None): - """Computes rms over first axis""" +def rms_0(dm, do, var=None, weighted=True): + """Computes rms over first axis -- compare two zonal mean fields""" if dm is None and do is None: # just want the doc return { "Name": "Root Mean Square over First Axis", "Abstract": "Compute Root Mean Square over the first axis", "Contact": "pcmdi-metrics@llnl.gov", } - dm['diff_square'] = (dm[var] - do[var])**2 - stat = math.sqrt(dm.spatial.average('diff_square', axis=['Y'])['diff_square'].values) + dif_square = (dm[var] - do[var])**2 + if weighted: + weights = dm.spatial.get_weights(axis=['Y']) + stat = math.sqrt(dif_square.weighted(weights).mean(("lat"))) + else: + stat = math.sqrt(dif_square.mean(("lat"))) return float(stat) From 1b0aa6a051dbca6dbbe9ef172b909e22f504141a Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 30 Nov 2022 15:14:07 -0800 Subject: [PATCH 053/130] update xcdat version --- conda-env/dev.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conda-env/dev.yml b/conda-env/dev.yml index 91c1e1fd1..9724506d2 100644 --- a/conda-env/dev.yml +++ b/conda-env/dev.yml @@ -19,7 +19,7 @@ dependencies: - eofs=1.4.0 - seaborn=0.11.1 - enso_metrics=1.1.1 - - xcdat=0.3.3 + - xcdat=0.4.0 - xmltodict=0.13.0 # Testing # ================== 
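(The statistic functions rewritten in the patches above all follow one pattern: get xcdat spatial weights for the grid, then reduce with xarray's `weighted(...).mean(...)`. A minimal sketch of that pattern follows; it is illustrative rather than the committed `bias_xy`/`rms_xy`, and it assumes two xcdat datasets `dm` and `do` on the same rectilinear `lat`/`lon` grid sharing a variable name `var`.)

```python
# Illustrative sketch of the area-weighted statistics pattern used in
# compute_statistics.py; assumes rectilinear "lat"/"lon" coordinates.
import math

import xcdat  # noqa: F401  # registers the .spatial accessor used below


def weighted_bias_and_rms(dm, do, var):
    """Area-weighted mean bias and RMS of dm[var] - do[var] over lon/lat."""
    weights = dm.spatial.get_weights(axis=["X", "Y"])
    dif = dm[var] - do[var]
    bias = float(dif.weighted(weights).mean(("lon", "lat")))
    rms = math.sqrt(float((dif**2).weighted(weights).mean(("lon", "lat"))))
    return bias, rms
```

Because the weights depend only on the grid, they can be computed once and passed back in through the `weights=None` keyword added in the "more efficient calculation" patch, rather than being recomputed for every season and month.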
From cfbdf486a079a0363baf037db310af0434b00218 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 30 Nov 2022 20:02:54 -0800 Subject: [PATCH 054/130] Enable statistics calculation -- now output saves in json --- pcmdi_metrics/io/base.py | 13 +++++- .../mean_climate/lib/compute_statistics.py | 12 ++++-- .../lib/mean_climate_metrics_calculations.py | 42 ++++++++++++++++++- .../mean_climate/lib/outputmetrics.py | 2 + 4 files changed, 63 insertions(+), 6 deletions(-) diff --git a/pcmdi_metrics/io/base.py b/pcmdi_metrics/io/base.py index 430a4bbab..b9eb65429 100755 --- a/pcmdi_metrics/io/base.py +++ b/pcmdi_metrics/io/base.py @@ -167,6 +167,7 @@ def write( type="json", mode="w", include_YAML=False, + include_history=False, include_script=False, *args, **kwargs, @@ -217,6 +218,10 @@ def write( if "script" in out_dict["provenance"].keys(): del out_dict["provenance"]["script"] + if not include_history: + if "history" in out_dict["provenance"].keys(): + del out_dict["provenance"]["history"] + json.dump(out_dict, f, cls=CDMSDomainsEncoder, *args, **kwargs) f.close() @@ -227,11 +232,14 @@ def write( f.close() elif self.type == "nc": + """ f = cdms2.open(file_name, "w") f.write(data, *args, **kwargs) f.metrics_git_sha1 = pcmdi_metrics.__git_sha1__ f.uvcdat_version = cdat_info.get_version() f.close() + """ + data.to_netcdf(file_name) else: logging.getLogger("pcmdi_metrics").error("Unknown type: %s" % type) @@ -374,7 +382,10 @@ def extract_var_from_file(self, var, var_in_file, *args, **kwargs): extracted_var = var_file(var_in_file, *args, **kwargs) var_file.close() """ - ds = xcdat_open(self(), data_var=var_in_file, decode_times=False) + try: + ds = xcdat_open(self(), data_var=var_in_file, decode_times=True) + except: + ds = xcdat_open(self(), data_var=var_in_file, decode_times=False) # Temporary part to read in cdms written obs4MIP AC files extracted_var = ds return extracted_var diff --git a/pcmdi_metrics/mean_climate/lib/compute_statistics.py b/pcmdi_metrics/mean_climate/lib/compute_statistics.py index a7eb4dfb5..0ad723f67 100644 --- a/pcmdi_metrics/mean_climate/lib/compute_statistics.py +++ b/pcmdi_metrics/mean_climate/lib/compute_statistics.py @@ -17,7 +17,7 @@ def annual_mean(dm, do, var=None): return dm_am, do_am # DataSets -def seasonal_mean(d, sea, var=None): +def seasonal_mean(d, season, var=None): """Computes SEASONAL MEAN""" if d is None and season is None: # just want the doc return { @@ -45,8 +45,11 @@ def seasonal_mean(d, sea, var=None): + d.isel(time=indx[1])[var] * mo_wts[indx[1]] + d.isel(time=indx[2])[var] * mo_wts[indx[2]] ) / season_num_days + + ds_new = d.isel(time=0).copy(deep=True) + ds_new[var] = d_season - return d_season + return ds_new # Metrics calculations @@ -189,9 +192,12 @@ def rms_xyt(dm, do, var=None): "Contact": "pcmdi-metrics@llnl.gov", } ds = dm.copy(deep=True) + print('jwlee-test-rms_xyt-1') ds['diff_square'] = (dm[var] - do[var])**2 ds['diff_square_sqrt'] = np.sqrt(ds.spatial.average('diff_square', axis=['X', 'Y'])['diff_square']) + print('jwlee-test-rms_xyt-2') stat = ds.temporal.average('diff_square_sqrt')['diff_square_sqrt'].values + print('jwlee-test-rms_xyt-3') return float(stat) @@ -259,4 +265,4 @@ def zonal_mean(dm, do, var=None): } dm_zm = dm.spatial.average(var, axis=['X']) do_zm = do.spatial.average(var, axis=['X']) - return dm_zm, do_zm # DataSets \ No newline at end of file + return dm_zm, do_zm # DataSets diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py 
index 590d48b03..eac57b855 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py @@ -31,9 +31,27 @@ def compute_metrics(Var, dm, do): ) metrics_defs["zonal_mean"] = pcmdi_metrics.mean_climate.lib.zonal_mean(None, None) return metrics_defs + #cdms.setAutoBounds("on") + print('var: ', var) + + # Below is temporary... + do['time'] = dm['time'] + do['time_bnds'] = dm['time_bnds'] + print('do.time: ', do['time']) + + dm.to_netcdf('dm.nc') + do.to_netcdf('do.nc') + + """ + print('jwlee-test-check-calendar') + print("dm.time.encoding['calendar']: ", dm.time.encoding['calendar']) + do.time.encoding['calendar'] = dm.time.encoding['calendar'] + print("do.time.encoding['calendar']: ", do.time.encoding['calendar']) + """ metrics_dictionary = {} + # SET CONDITIONAL ON INPUT VARIABLE if var == "pr": conv = 86400.0 @@ -46,51 +64,71 @@ def compute_metrics(Var, dm, do): sig_digits = ".3f" # CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD + print('jwlee-test-compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD') + print('jwlee-test-compute_metrics, rms_xyt') rms_xyt = pcmdi_metrics.mean_climate.lib.rms_xyt(dm, do, var) + print('jwlee-test-compute_metrics, stdObs_xyt') stdObs_xyt = pcmdi_metrics.mean_climate.lib.std_xyt(do, var) + print('jwlee-test-compute_metrics, std_xyt') std_xyt = pcmdi_metrics.mean_climate.lib.std_xyt(dm, var) # CALCULATE ANNUAL MEANS + print('jwlee-test-compute_metrics-CALCULATE ANNUAL MEANS') dm_am, do_am = pcmdi_metrics.mean_climate.lib.annual_mean(dm, do, var) # CALCULATE ANNUAL MEAN BIAS + print('jwlee-test-compute_metrics-CALCULATE ANNUAL MEAN BIAS') bias_xy = pcmdi_metrics.mean_climate.lib.bias_xy(dm_am, do_am, var) # CALCULATE MEAN ABSOLUTE ERROR + print('jwlee-test-compute_metrics-CALCULATE MSE') mae_xy = pcmdi_metrics.mean_climate.lib.meanabs_xy(dm_am, do_am, var) # CALCULATE ANNUAL MEAN RMS (centered and uncentered) + print('jwlee-test-compute_metrics-CALCULATE MEAN RMS') rms_xy = pcmdi_metrics.mean_climate.lib.rms_xy(dm_am, do_am, var) rmsc_xy = pcmdi_metrics.mean_climate.lib.rmsc_xy(dm_am, do_am, var) # CALCULATE ANNUAL MEAN CORRELATION + print('jwlee-test-compute_metrics-CALCULATE MEAN CORR') cor_xy = pcmdi_metrics.mean_climate.lib.cor_xy(dm_am, do_am, var) # CALCULATE ANNUAL OBS and MOD STD + print('jwlee-test-compute_metrics-CALCULATE ANNUAL OBS AND MOD STD') stdObs_xy = pcmdi_metrics.mean_climate.lib.std_xy(do_am, var) std_xy = pcmdi_metrics.mean_climate.lib.std_xy(dm_am, var) # CALCULATE ANNUAL OBS and MOD MEAN + print('jwlee-test-compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN') meanObs_xy = pcmdi_metrics.mean_climate.lib.mean_xy(do_am, var) mean_xy = pcmdi_metrics.mean_climate.lib.mean_xy(dm_am, var) # ZONAL MEANS ###### # CALCULATE ANNUAL MEANS + print('jwlee-test-compute_metrics-CALCULATE ANNUAL MEANS') dm_amzm, do_amzm = pcmdi_metrics.mean_climate.lib.zonal_mean(dm_am, do_am, var) # CALCULATE ANNUAL AND ZONAL MEAN RMS + print('jwlee-test-compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS') rms_y = pcmdi_metrics.mean_climate.lib.rms_0(dm_amzm, do_amzm, var) # CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS + print('jwlee-test-compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS') + """ dm_amzm_grown, dummy = grower(dm_amzm, dm_am, var) dm_am_devzm = MV2.subtract(dm_am, dm_amzm_grown, var) do_amzm_grown, dummy = grower(do_amzm, do_am, var) do_am_devzm = MV2.subtract(do_am, do_amzm_grown, var) rms_xy_devzm = 
pcmdi_metrics.mean_climate.lib.rms_xy(dm_am_devzm, do_am_devzm, var) + """ + dm_am_devzm = dm_am - dm_amzm + do_am_devzm = do_am - do_amzm + rms_xy_devzm = pcmdi_metrics.mean_climate.lib.rms_xy(dm_am_devzm, do_am_devzm, var) # CALCULATE ANNUAL AND ZONAL MEAN STD # CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD + print('jwlee-test-compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD') stdObs_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(do_am_devzm, var) std_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(dm_am_devzm, var) @@ -193,8 +231,8 @@ def compute_metrics(Var, dm, do): "dec", ] ): - dm_mo = dm[n] - do_mo = do[n] + dm_mo = dm.isel(time=n) + do_mo = do.isel(time=n) # CALCULATE MONTHLY RMS AND CORRELATION rms_mo = pcmdi_metrics.mean_climate.lib.rms_xy(dm_mo, do_mo, var) diff --git a/pcmdi_metrics/mean_climate/lib/outputmetrics.py b/pcmdi_metrics/mean_climate/lib/outputmetrics.py index 513196c32..cf926ffcb 100644 --- a/pcmdi_metrics/mean_climate/lib/outputmetrics.py +++ b/pcmdi_metrics/mean_climate/lib/outputmetrics.py @@ -344,6 +344,8 @@ def output_interpolated_model_climatologies(self, test, test_data): clim_file.region = region_name clim_file.realization = self.parameter.realization DataSet.apply_custom_keys(clim_file, self.parameter.custom_keys, self.var) + print('jwlee-test outputmetrics clim_file.write') + print('type(test_data):', type(test_data)) clim_file.write(test_data, type="nc", id=self.var) def get_region_name_from_region(self, region): From 0b57dc9553844fb1aef1270115b54a39fb7e80c2 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 30 Nov 2022 20:40:51 -0800 Subject: [PATCH 055/130] extract variable at specific level from 4d field --- pcmdi_metrics/io/base.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pcmdi_metrics/io/base.py b/pcmdi_metrics/io/base.py index b9eb65429..4acff5ec3 100755 --- a/pcmdi_metrics/io/base.py +++ b/pcmdi_metrics/io/base.py @@ -386,6 +386,12 @@ def extract_var_from_file(self, var, var_in_file, *args, **kwargs): ds = xcdat_open(self(), data_var=var_in_file, decode_times=True) except: ds = xcdat_open(self(), data_var=var_in_file, decode_times=False) # Temporary part to read in cdms written obs4MIP AC files + + if 'level' in list(kwargs.keys()): + print("jwlee-test extract_var_from_file kwargs['level']:", kwargs['level']) + level = kwargs['level'] + ds = ds.sel(plev=level) + extracted_var = ds return extracted_var From d53a75a47ba98ffaf58f79da285122cd7854dd83 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Sun, 4 Dec 2022 20:55:19 -0800 Subject: [PATCH 056/130] load regions_specs as function, not exec. some clean ups --- pcmdi_metrics/io/__init__.py | 2 + pcmdi_metrics/io/base.py | 58 ++++++------------- .../mean_climate/lib/compute_statistics.py | 22 +++---- pcmdi_metrics/mean_climate/lib/dataset.py | 3 +- .../lib/mean_climate_metrics_calculations.py | 9 +-- .../lib/mean_climate_metrics_driver.py | 8 +-- pcmdi_metrics/mean_climate/lib/model.py | 5 +- pcmdi_metrics/mean_climate/lib/observation.py | 6 +- .../mean_climate/lib/outputmetrics.py | 24 +++----- pcmdi_metrics/version.py | 4 +- setup.py | 1 + 11 files changed, 51 insertions(+), 91 deletions(-) diff --git a/pcmdi_metrics/io/__init__.py b/pcmdi_metrics/io/__init__.py index 62b92a3a0..9822a2637 100644 --- a/pcmdi_metrics/io/__init__.py +++ b/pcmdi_metrics/io/__init__.py @@ -2,3 +2,5 @@ from .xcdat_openxml import xcdat_open # noqa # isort:skip from . 
import base # noqa from .base import MV2Json # noqa +from .default_regions_define import load_regions_specs # noqa +from .default_regions_define import region_subset # noqa diff --git a/pcmdi_metrics/io/base.py b/pcmdi_metrics/io/base.py index 4acff5ec3..c756f4cb9 100755 --- a/pcmdi_metrics/io/base.py +++ b/pcmdi_metrics/io/base.py @@ -4,7 +4,6 @@ import logging import os import re -import sys from collections import OrderedDict from collections.abc import Mapping @@ -373,18 +372,10 @@ def get(self, var, var_in_file=None, region={}, *args, **kwargs): def extract_var_from_file(self, var, var_in_file, *args, **kwargs): if var_in_file is None: var_in_file = var - # self.extension = 'nc' - """ - var_file = cdms2.open(self(), "r") - for att in ["var_in_file,", "varInFile"]: - if att in kwargs: - del kwargs[att] - extracted_var = var_file(var_in_file, *args, **kwargs) - var_file.close() - """ + try: ds = xcdat_open(self(), data_var=var_in_file, decode_times=True) - except: + except Exception: ds = xcdat_open(self(), data_var=var_in_file, decode_times=False) # Temporary part to read in cdms written obs4MIP AC files if 'level' in list(kwargs.keys()): @@ -393,7 +384,7 @@ def extract_var_from_file(self, var, var_in_file, *args, **kwargs): ds = ds.sel(plev=level) extracted_var = ds - + return extracted_var def is_masking(self): @@ -416,25 +407,10 @@ def mask_var(self, var): def set_target_grid_and_mask_in_var(self, var, var_in_file): """ self: object - dir(self): ['__abstractmethods__', '__call__', '__class__', '__delattr__', '__dict__', '__dir__', '__doc__', '__eq__', '__format__', '__ge__', '__getattribute__', '__gt__', '__hash__', '__init__', '__init_subclass__', '__le__', '__lt__', '__module__', '__ne__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', '__weakref__', '_abc_impl', 'case_id', 'construct', 'ext', 'extract_var_from_file', 'file_mask_template', 'get', 'get_mask_from_var', 'hash', 'is_masking', 'keys', 'mask', 'mask_var', 'model_version', 'period', 'read', 'realization', 'realm', 'region', 'regrid_method', 'regrid_tool', 'reverse', 'root', 'set_domain_in_var', 'set_file_mask_template', 'set_target_grid', 'set_target_grid_and_mask_in_var', 'setup_cdms2', 'table', 'target_grid', 'target_grid_name', 'target_mask', 'template', 'type', 'value', 'var_from_file', 'variable', 'write', 'write_cmec'] self(): string, path to input file """ print('jwlee-test-regrid, set_target_grid_and_mask_in_var start') - #print('jwlee-test-regrid, self.target_grid:', self.target_grid) if self.target_grid is not None: - #print('jwlee-test-regrid, var[var_in_file].shape:', var[var_in_file].shape) - """ - print('jwlee-test-regrid, var.shape:', var.shape) - var = var.regrid( - self.target_grid, - regridTool=self.regrid_tool, - regridMethod=self.regrid_method, - coordSys="deg", - diag={}, - periodicity=1, - ) - """ - #print('jwlee-test-regrid, dir(self):', dir(self)) print('jwlee-test-regrid, type(self):', type(self)) print('jwlee-test-regrid, type(self()):', type(self())) print('jwlee-test-regrid, self():', self()) @@ -442,14 +418,12 @@ def set_target_grid_and_mask_in_var(self, var, var_in_file): var.to_netcdf(self().split('/')[-1].split('.nc')[0] + '_test1-org.nc') var = var.regridder.horizontal(var_in_file, self.target_grid, tool=self.regrid_tool) print('jwlee-test-regrid, regridder done') - var.to_netcdf(self().split('/')[-1].split('.nc')[0]+'_test2-regridded.nc') - + var.to_netcdf(self().split('/')[-1].split('.nc')[0] + '_test2-regridded.nc') 
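# ---------------------------------------------------------------------------
# A stand-alone sketch of the open -> (optional) level-extract -> regrid chain
# that the Base methods above now delegate to xcdat.  Everything named below is
# illustrative only: the file name, the variable "ta", and the 200 hPa level
# are placeholders, not values taken from this patch.
import xcdat

try:
    ds = xcdat.open_dataset("ta_AC.nc", data_var="ta", decode_times=True)
except Exception:
    # fallback for annual-cycle files whose time axis cannot be CF-decoded
    ds = xcdat.open_dataset("ta_AC.nc", data_var="ta", decode_times=False)

ds = ds.sel(plev=200 * 100.0)  # catalogue levels are in hPa; CMIP plev is in Pa

t_grid = xcdat.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5)
ds_2p5 = ds.regridder.horizontal("ta", t_grid, tool="regrid2")  # or tool="xesmf"
# ---------------------------------------------------------------------------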
print('jwlee-test-regrid-2, var[var_in_file].shape:', var[var_in_file].shape) - print('jwlee-test-regrid-3, self.target_mask:', self.target_mask) if self.target_mask is not None: - #if self.target_mask.shape != var.shape: + # if self.target_mask.shape != var.shape: if self.target_mask.shape != var[var_in_file].shape: dummy, mask = genutil.grower(var, self.target_mask) else: @@ -461,15 +435,17 @@ def set_target_grid_and_mask_in_var(self, var, var_in_file): return var def set_domain_in_var(self, var, region): - domain = region.get("domain", None) - if domain is not None: - if isinstance(domain, dict): - var = var(**domain) - elif isinstance(domain, (list, tuple)): - var = var(*domain) - elif isinstance(domain, cdms2.selectors.Selector): - domain.id = region.get("id", "region") - var = var(*[domain]) + """ + self: + var: + region: , e.g., {'domain': Selector(), 'id': 'NHEX'} + """ + region_id = region['id'] + from pcmdi_metrics.io import load_regions_specs, region_subset + regions_specs = load_regions_specs() + if region_id not in ['global', 'land', 'ocean']: + var = region_subset(var, regions_specs, region=region_id) + return var def set_file_mask_template(self): @@ -498,7 +474,7 @@ def set_target_grid(self, target, regrid_tool="esmf", regrid_method="linear"): self.regrid_method = regrid_method if target == "2.5x2.5": print('jwlee-test, set_target_grid, start') - #self.target_grid = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) + # self.target_grid = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) self.target_grid = xcdat.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) self.target_grid_name = target elif cdms2.isGrid(target): diff --git a/pcmdi_metrics/mean_climate/lib/compute_statistics.py b/pcmdi_metrics/mean_climate/lib/compute_statistics.py index 0ad723f67..850e3263b 100644 --- a/pcmdi_metrics/mean_climate/lib/compute_statistics.py +++ b/pcmdi_metrics/mean_climate/lib/compute_statistics.py @@ -1,5 +1,5 @@ -import xcdat import math + import numpy as np @@ -39,13 +39,13 @@ def seasonal_mean(d, season, var=None): indx = [8, 9, 10] season_num_days = mo_wts[indx[0]] + mo_wts[indx[1]] + mo_wts[indx[2]] - + d_season = ( d.isel(time=indx[0])[var] * mo_wts[indx[0]] + d.isel(time=indx[1])[var] * mo_wts[indx[1]] + d.isel(time=indx[2])[var] * mo_wts[indx[2]] ) / season_num_days - + ds_new = d.isel(time=0).copy(deep=True) ds_new[var] = d_season @@ -94,15 +94,15 @@ def cor_xy(dm, do, var=None, weights=None): } if weights is None: weights = dm.spatial.get_weights(axis=['X', 'Y']) - + dm_avg = dm.spatial.average(var, axis=['X', 'Y'], weights=weights)[var].values do_avg = do.spatial.average(var, axis=['X', 'Y'], weights=weights)[var].values - + covariance = ((dm[var] - dm_avg) * (do[var] - do_avg)).weighted(weights).mean(dim=['lon', 'lat']).values std_dm = std_xy(dm, var) std_do = std_xy(do, var) stat = covariance / (std_dm * std_do) - + return float(stat) @@ -114,7 +114,7 @@ def mean_xy(d, var=None, weights=None): "Abstract": "Area Mean (area weighted)", "Contact": "pcmdi-metrics@llnl.gov", } - + if weights is None: weights = d.spatial.get_weights(axis=['X', 'Y']) stat = float(d[var].weighted(weights).mean(("lon", "lat"))) @@ -196,7 +196,7 @@ def rms_xyt(dm, do, var=None): ds['diff_square'] = (dm[var] - do[var])**2 ds['diff_square_sqrt'] = np.sqrt(ds.spatial.average('diff_square', axis=['X', 'Y'])['diff_square']) print('jwlee-test-rms_xyt-2') - stat = ds.temporal.average('diff_square_sqrt')['diff_square_sqrt'].values + stat = 
ds.temporal.average('diff_square_sqrt')['diff_square_sqrt'].values print('jwlee-test-rms_xyt-3') return float(stat) @@ -211,7 +211,7 @@ def rmsc_xy(dm, do, var=None, weights=None): } if weights is None: weights = dm.spatial.get_weights(axis=['X', 'Y']) - + dm_anomaly = dm[var] - dm[var].weighted(weights).mean(("lon", "lat")) do_anomaly = do[var] - do[var].weighted(weights).mean(("lon", "lat")) diff_square = (dm_anomaly - do_anomaly)**2 @@ -227,8 +227,8 @@ def std_xy(d, var=None, weights=None): "Name": "Spatial Standard Deviation", "Abstract": "Compute Spatial Standard Deviation", "Contact": "pcmdi-metrics@llnl.gov", - } - + } + average = float(d.spatial.average(var, axis=['X', 'Y'])[var].values) anomaly = (d[var] - average)**2 if weights is None: diff --git a/pcmdi_metrics/mean_climate/lib/dataset.py b/pcmdi_metrics/mean_climate/lib/dataset.py index 537908458..7253c899d 100644 --- a/pcmdi_metrics/mean_climate/lib/dataset.py +++ b/pcmdi_metrics/mean_climate/lib/dataset.py @@ -102,8 +102,9 @@ def create_sftlf(parameter): else: t_grid = parameter.target_grid print('jwlee-test-target_grid-create done') + print('jwlee-test-target_grid-create t_grid:', t_grid) - #sft = cdutil.generateLandSeaMask(t_grid) + # sft = cdutil.generateLandSeaMask(t_grid) sft = cdutil.generateLandSeaMask(t_grid_cdms2) sft[:] = sft.filled(1.0) * 100.0 sftlf["target_grid"] = sft diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py index eac57b855..1c7528ab2 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py @@ -1,10 +1,6 @@ - -#import cdms2 as cdms -#import MV2 -#from genutil import grower +import collections import pcmdi_metrics -import collections def compute_metrics(Var, dm, do): @@ -32,7 +28,7 @@ def compute_metrics(Var, dm, do): metrics_defs["zonal_mean"] = pcmdi_metrics.mean_climate.lib.zonal_mean(None, None) return metrics_defs - #cdms.setAutoBounds("on") + # cdms.setAutoBounds("on") print('var: ', var) # Below is temporary... 
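# ---------------------------------------------------------------------------
# Sketch of the weighting pattern shared by the xcdat-based statistics above
# (cor_xy, rmsc_xy, std_xy, rms_xyt): take area weights from the spatial
# accessor, then reduce with DataArray.weighted().  The function below is an
# illustrative area-weighted spatial RMS for time-collapsed (annual-mean)
# fields on a common lat/lon grid; it is not a routine copied from the library.
import numpy as np


def rms_xy_sketch(dm, do, var):
    weights = dm.spatial.get_weights(axis=["X", "Y"])  # area weights from bounds
    diff_square = (dm[var] - do[var]) ** 2  # pointwise squared error
    return float(np.sqrt(diff_square.weighted(weights).mean(("lat", "lon"))))
# ---------------------------------------------------------------------------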
@@ -51,7 +47,6 @@ def compute_metrics(Var, dm, do): """ metrics_dictionary = {} - # SET CONDITIONAL ON INPUT VARIABLE if var == "pr": conv = 86400.0 diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py index c6cba41a8..ba14840bb 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py @@ -104,7 +104,8 @@ def load_default_regions_and_regions_specs(self): from default_regions.py and stores them as attributes.""" default_regions_file = ( DataSet.load_path_as_file_obj( - "default_regions.py" + # "default_regions.py" + "default_regions_xcdat.py" ) ) exec( @@ -212,13 +213,11 @@ def run_reference_and_test_comparison(self): ) break - #try: - if 1: + try: print('jwlee-test-2: type(self), ref, tst:', type(self), ref, tst) print('jwlee-test-2: self.var, self.var_name_long:', self.var, self.var_name_long) print('jwlee-test-2: tst()[self.var].shape:', tst()[self.var].shape) self.output_metric.calculate_and_output_metrics(ref, tst) - """ except RuntimeError: continue except Exception as err: @@ -229,7 +228,6 @@ def run_reference_and_test_comparison(self): ) logging.getLogger("pcmdi_metrics").info(err_msg) break - """ def is_data_set_obs(self, data_set): """Is data_set (which is either a test or reference) an obs?""" diff --git a/pcmdi_metrics/mean_climate/lib/model.py b/pcmdi_metrics/mean_climate/lib/model.py index ed74cbd0d..850ac872b 100644 --- a/pcmdi_metrics/mean_climate/lib/model.py +++ b/pcmdi_metrics/mean_climate/lib/model.py @@ -59,8 +59,7 @@ def get(self): """Gets the variable based on the region and level (if given) for the file from data_path, which is defined in the initalizer.""" print('jwlee-test-get: self.var_in_file, self.region:', self.var_in_file, self.region) - #try: - if 1: + try: if self.level is None: data_model = self._model_file.get( self.var, var_in_file=self.var_in_file, region=self.region @@ -75,14 +74,12 @@ def get(self): return data_model - """ except Exception as e: msg = "Failed to get variables %s for versions: %s, error: %s" logging.getLogger("pcmdi_metrics").error( msg % (self.var, self.obs_or_model, e) ) raise RuntimeError("Need to skip model: %s" % self.obs_or_model) - """ def get_var_in_file(self): """Based off the model_tweaks parameter, get the variable mapping.""" diff --git a/pcmdi_metrics/mean_climate/lib/observation.py b/pcmdi_metrics/mean_climate/lib/observation.py index ba3e87534..2d7faea1c 100644 --- a/pcmdi_metrics/mean_climate/lib/observation.py +++ b/pcmdi_metrics/mean_climate/lib/observation.py @@ -131,8 +131,7 @@ def setup_target_mask(self): def get(self): """Gets the variable based on the region and level (if given) for the file from data_path, which is defined in the initializer.""" - #try: - if 1: + try: print('jwlee-test-observation-get, self.level:', self.level) if self.level is not None: data_obs = self._obs_file.get( @@ -141,7 +140,6 @@ def get(self): else: data_obs = self._obs_file.get(self.var, region=self.region) return data_obs - """ except Exception as e: if self.level is not None: logging.getLogger("pcmdi_metrics").error( @@ -155,7 +153,7 @@ def get(self): "Failed opening 3D OBS", self.var, self.obs_or_model, e ) ) - """ + def hash(self): """Return a hash of the file.""" return self._obs_file.hash() diff --git a/pcmdi_metrics/mean_climate/lib/outputmetrics.py b/pcmdi_metrics/mean_climate/lib/outputmetrics.py index cf926ffcb..05a6fe434 100644 --- 
a/pcmdi_metrics/mean_climate/lib/outputmetrics.py +++ b/pcmdi_metrics/mean_climate/lib/outputmetrics.py @@ -6,9 +6,9 @@ from pcmdi_metrics import LOG_LEVEL from pcmdi_metrics.io.base import Base +from pcmdi_metrics.mean_climate.lib import compute_metrics from pcmdi_metrics.mean_climate.lib.dataset import DataSet from pcmdi_metrics.mean_climate.lib.observation import Observation -from pcmdi_metrics.mean_climate.lib import compute_metrics try: basestring # noqa @@ -124,16 +124,13 @@ def calculate_and_output_metrics(self, ref, test): ref_data = None - #try: - if 1: + try: ref_data = ref() - """ except Exception as e: msg = "Error while processing observation %s for variables %s:\n\t%s" logging.getLogger("pcmdi_metrics").error( msg % (ref.obs_or_model, self.var, str(e)) ) - """ if ref_data is None: # Something went bad! raise RuntimeError("Could not load reference {}".format(ref.obs_or_model)) @@ -146,14 +143,9 @@ def calculate_and_output_metrics(self, ref, test): raise RuntimeError("Need to skip model: %s" % test.obs_or_model) # Todo: Make this a fcn - #print('jwlee-test-calculate_and_output_metrics, type(test):', type(test)) - #print('jwlee-test-2-1, test().shape:', test().shape) - #print('jwlee-test-2-2, test_data.shape:', test_data.shape) print('jwlee-test-calculate_and_output_metrics, grid_in_metrics_dict start') self.set_grid_in_metrics_dictionary(test_data, self.var) print('jwlee-test-calculate_and_output_metrics, grid_in_metrics_dict done') - #print('jwlee-test-2-3, test_data.shape:', test_data.shape) - print('jwlee-test type(ref_data), type(test_data):', type(ref_data), type(test_data)) print('jwlee-test ref_data:', ref_data) print('jwlee-test test_data:', test_data) @@ -162,13 +154,13 @@ def calculate_and_output_metrics(self, ref, test): print('jwlee-test ref_data[self.var].shape:', ref_data[self.var].shape) print('jwlee-test test_data[self.var].shape:', test_data[self.var].shape) - #if ref_data.shape != test_data.shape: + # if ref_data.shape != test_data.shape: if ref_data[self.var].shape != test_data[self.var].shape: print('jwlee-test raise runtime error') raise RuntimeError( "Two data sets have different shapes. 
{} vs {}".format( - str(ref_data[self.var].shape), str(test_data[self.var].shape)) - #% (ref_data.shape, test_data.shape) + str(ref_data[self.var].shape), str(test_data[self.var].shape)) + # % (ref_data.shape, test_data.shape) ) print('jwlee-test-calculate_and_output_metrics, set_simulation_desc start') @@ -190,7 +182,7 @@ def calculate_and_output_metrics(self, ref, test): if not self.parameter.dry_run: print('jwlee-test-calculate_and_output_metrics, compute_metrics start') print('jwlee-test-calculate_and_output_metrics, self.var_name_long:', self.var_name_long) - + pr_rgn = compute_metrics( self.var_name_long, test_data, ref_data ) @@ -243,8 +235,8 @@ def set_grid_in_metrics_dictionary(self, test_data, var): grid["GridName"] = self.parameter.target_grid print('jwlee-test set_grid_in_metrics_dictionary middle') print('jwlee-test var:', var) - #print('jwlee-test dir(test_data):', dir(test_data)) - #grid["GridResolution"] = test_data.shape[1:] + # print('jwlee-test dir(test_data):', dir(test_data)) + # grid["GridResolution"] = test_data.shape[1:] grid["GridResolution"] = test_data[var].shape[1:] self.metrics_dictionary["GridInfo"] = grid print('jwlee-test set_grid_in_metrics_dictionary done') diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index b9f2d857b..883fe4d81 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-56-g4857338' -__git_sha1__ = '48573380a221a80d0763908a8454d3459e04757e' +__git_tag_describe__ = 'v2.3.1-66-g0b57dc9' +__git_sha1__ = '0b57dc9553844fb1aef1270115b54a39fb7e80c2' diff --git a/setup.py b/setup.py index 155df2524..5d53b57a1 100755 --- a/setup.py +++ b/setup.py @@ -103,6 +103,7 @@ "share/test_data_files.txt", "share/cmip_model_list.json", "share/default_regions.py", + #"share/default_regions_xcdat.py", "share/DefArgsCIA.json", ), ), From 70bccd098e921f000fe5c6f3df9ce4bf5464eede Mon Sep 17 00:00:00 2001 From: lee1043 Date: Mon, 5 Dec 2022 09:38:10 -0800 Subject: [PATCH 057/130] load regions_specs as like function, for xcdat usage --- pcmdi_metrics/io/default_regions_define.py | 89 ++++++++++++++++++++++ 1 file changed, 89 insertions(+) create mode 100755 pcmdi_metrics/io/default_regions_define.py diff --git a/pcmdi_metrics/io/default_regions_define.py b/pcmdi_metrics/io/default_regions_define.py new file mode 100755 index 000000000..18dac1193 --- /dev/null +++ b/pcmdi_metrics/io/default_regions_define.py @@ -0,0 +1,89 @@ +def load_regions_specs(): + + regions_specs = { + # Mean Climate + "NHEX": {"domain": {"latitude": (30.0, 90)}}, + "SHEX": {"domain": {"latitude": (-90.0, -30)}}, + "TROPICS": {"domain": {"latitude": (-30.0, 30)}}, + "global": {}, + "90S50S": {"domain": {"latitude": (-90.0, -50)}}, + "50S20S": {"domain": {"latitude": (-50.0, -20)}}, + "20S20N": {"domain": {"latitude": (-20.0, 20)}}, + "20N50N": {"domain": {"latitude": (20.0, 50)}}, + "50N90N": {"domain": {"latitude": (50.0, 90)}}, + "land_NHEX": {"value": 100, "domain": {"latitude": (30.0, 90)}}, + "land_SHEX": {"value": 100, "domain": {"latitude": (-90.0, -30)}}, + "land_TROPICS": {"value": 100, "domain": {"latitude": (-30.0, 30)}}, + "land": {"value": 100}, + "ocean_NHEX": {"value": 0, "domain": {"latitude": (30.0, 90)}}, + "ocean_SHEX": {"value": 0, "domain": {"latitude": (-90.0, -30)}}, + "ocean_TROPICS": {"value": 0, "domain": {"latitude": (30.0, 30)}}, + "ocean": {"value": 0}, + # Modes of variability + "NAM": {"domain": {"latitude": (20.0, 90), "longitude": (-180, 180)}}, + "NAO": {"domain": 
{"latitude": (20.0, 80), "longitude": (-90, 40)}}, + "SAM": {"domain": {"latitude": (-20.0, -90), "longitude": (0, 360)}}, + "PNA": {"domain": {"latitude": (20.0, 85), "longitude": (120, 240)}}, + "PDO": {"domain": {"latitude": (20.0, 70), "longitude": (110, 260)}}, + # Monsoon domains for Wang metrics + # All monsoon domains + "AllMW": {"domain": {"latitude": (-40.0, 45.0), "longitude": (0.0, 360.0)}}, + "AllM": {"domain": {"latitude": (-45.0, 45.0), "longitude": (0.0, 360.0)}}, + # North American Monsoon + "NAMM": {"domain": {"latitude": (0.0, 45.0), "longitude": (210.0, 310.0)}}, + # South American Monsoon + "SAMM": {"domain": {"latitude": (-45.0, 0.0), "longitude": (240.0, 330.0)}}, + # North African Monsoon + "NAFM": {"domain": {"latitude": (0.0, 45.0), "longitude": (310.0, 60.0)}}, + # South African Monsoon + "SAFM": {"domain": {"latitude": (-45.0, 0.0), "longitude": (0.0, 90.0)}}, + # Asian Summer Monsoon + "ASM": {"domain": {"latitude": (0.0, 45.0), "longitude": (60.0, 180.0)}}, + # Australian Monsoon + "AUSM": {"domain": {"latitude": (-45.0, 0.0), "longitude": (90.0, 160.0)}}, + # Monsoon domains for Sperber metrics + # All India rainfall + "AIR": {"domain": {"latitude": (7.0, 25.0), "longitude": (65.0, 85.0)}}, + # North Australian + "AUS": {"domain": {"latitude": (-20.0, -10.0), "longitude": (120.0, 150.0)}}, + # Sahel + "Sahel": {"domain": {"latitude": (13.0, 18.0), "longitude": (-10.0, 10.0)}}, + # Gulf of Guinea + "GoG": {"domain": {"latitude": (0.0, 5.0), "longitude": (-10.0, 10.0)}}, + # North American monsoon + "NAmo": {"domain": {"latitude": (20.0, 37.0), "longitude": (-112.0, -103.0)}}, + # South American monsoon + "SAmo": {"domain": {"latitude": (-20.0, 2.5), "longitude": (-65.0, -40.0)}}, + } + + return regions_specs + + +def region_subset(d, regions_specs, region=None): + """ + d: xarray.Dataset + regions_specs: dict + region: string + """ + + if ((region is None) or ((region is not None) and (region not in list(regions_specs.keys())))): + print('Error: region not defined') + else: + if 'domain' in list(regions_specs[region].keys()): + if 'latitude' in list(regions_specs[region]['domain'].keys()): + lat0 = regions_specs[region]['domain']['latitude'][0] + lat1 = regions_specs[region]['domain']['latitude'][1] + if 'latitude' in (d.coords.dims): + d = d.sel(latitude=slice(lat0, lat1)) + elif 'lat' in (d.coords.dims): + d = d.sel(lat=slice(lat0, lat1)) + + if 'longitude' in list(regions_specs[region]['domain'].keys()): + lon0 = regions_specs[region]['domain']['longitude'][0] + lon1 = regions_specs[region]['domain']['longitude'][1] + if 'longitude' in (d.coords.dims): + d = d.sel(longitude=slice(lon0, lon1)) + elif 'lon' in (d.coords.dims): + d = d.sel(lon=slice(lon0, lon1)) + + return d From 5cd347dd0cf17b3be04cd627876227fb004f66a3 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 7 Dec 2022 15:21:42 -0800 Subject: [PATCH 058/130] work toward add new driver ... 
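The pair of helpers added above in default_regions_define.py replaces the old exec-based region handling: regions_specs is now returned by a plain function and the latitude/longitude subsetting is done with xarray selection. Their intended use, sketched here on a hypothetical annual-cycle file opened with xcdat, is simply:

    import xcdat
    from pcmdi_metrics.io import load_regions_specs, region_subset

    ds = xcdat.open_dataset("ts_AC.nc", data_var="ts")  # placeholder file
    regions_specs = load_regions_specs()
    # latitude-only subset (30N-90N); region_subset accepts coordinates named
    # either "lat"/"lon" or "latitude"/"longitude"
    ds_nhex = region_subset(ds, regions_specs, region="NHEX")
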
--- pcmdi_metrics/io/base.py | 23 ++- pcmdi_metrics/mean_climate/lib/dataset.py | 14 +- .../lib/mean_climate_metrics_driver.py | 4 +- pcmdi_metrics/mean_climate/lib/model.py | 8 ++ .../mean_climate/mean_climate_driver_new.py | 136 ++++++++++++++++++ pcmdi_metrics/version.py | 4 +- setup.py | 1 + 7 files changed, 183 insertions(+), 7 deletions(-) create mode 100755 pcmdi_metrics/mean_climate/mean_climate_driver_new.py diff --git a/pcmdi_metrics/io/base.py b/pcmdi_metrics/io/base.py index c756f4cb9..177572197 100755 --- a/pcmdi_metrics/io/base.py +++ b/pcmdi_metrics/io/base.py @@ -394,10 +394,24 @@ def is_masking(self): return False def mask_var(self, var): + """ + self: + var: + """ + print('jwlee-test-mask_var, self, var:', self, var) + print('jwlee-test-mask_var, type(self)', type(self)) + print('jwlee-test-mask_var, type(var)', type(var)) + print('jwlee-test-mask_var, self.mask', self.mask) + print('jwlee-test-mask_var, type(self.mask)', type(self.mask)) # cdms2.tvariable.TransientVariable + print('jwlee-test-mask_var, self.mask.shape', self.mask.shape) + print("jwlee-test-mask_var, tuple(var.dims[d] for d in ['lat', 'lon']):", tuple(var.dims[d] for d in ['lat', 'lon'])) + var_shape = tuple(var.dims[d] for d in ['lat', 'lon']) + if self.mask is None: self.set_file_mask_template() self.mask = self.get_mask_from_var(var) - if self.mask.shape != var.shape: + #if self.mask.shape != var.shape: + if self.mask.shape != var_shape: dummy, mask = genutil.grower(var, self.mask) else: mask = self.target_mask @@ -458,7 +472,12 @@ def set_file_mask_template(self): def get_mask_from_var(self, var): try: - o_mask = self.file_mask_template.get("sftlf") + print('jwlee-test-get_mask_from_var start') + #o_mask = self.file_mask_template.get("sftlf") + o_mask = self.file_mask_template.get("sftlf", var_in_file="sftlf") + print('jwlee-test-get_mask_from_var, self.file_mask_template:', self.file_mask_template) + print('jwlee-test-get_mask_from_var, type(o_mask):', type(o_mask)) + print('jwlee-test-get_mask_from_var, o_mask.shape:', o_mask.shape) except Exception: o_mask = ( cdutil.generateLandSeaMask(var, regridTool=self.regrid_tool).filled(1.0) diff --git a/pcmdi_metrics/mean_climate/lib/dataset.py b/pcmdi_metrics/mean_climate/lib/dataset.py index 7253c899d..0b259675f 100644 --- a/pcmdi_metrics/mean_climate/lib/dataset.py +++ b/pcmdi_metrics/mean_climate/lib/dataset.py @@ -73,10 +73,12 @@ def create_sftlf(parameter): """Create the sftlf file from the parameter.""" sftlf = {} + print('jwlee-test_create_sftlf, parameter.test_data_set:', parameter.test_data_set) for test in parameter.test_data_set: tmp_name = getattr(parameter, "sftlf_filename_template") if tmp_name is None: # Not defined from commandline or param file tmp_name = parameter.filename_template + print('jwlee-test_create_sftlf, tmp_name:', tmp_name) sft = Base(parameter.test_data_path, tmp_name) sft.model_version = test sft.table = "fx" @@ -87,14 +89,22 @@ def create_sftlf(parameter): sft.target_grid = None sft.realization = "r0i0p0" DataSet.apply_custom_keys(sft, parameter.custom_keys, "sftlf") - try: + if 1: + #try: + print('jwlee-test_create_sftlf, chk1') sftlf[test] = {"raw": sft.get("sftlf")} + print('jwlee-test_create_sftlf, chk1-2') sftlf[test]["filename"] = os.path.basename(sft()) + print('jwlee-test_create_sftlf, chk1-3') sftlf[test]["md5"] = sft.hash() + print('jwlee-test_create_sftlf, chk1-4') + """ except Exception: + print('jwlee-test_create_sftlf, chk2') sftlf[test] = {"raw": None} sftlf[test]["filename"] = None sftlf[test]["md5"] = None 
+ """ print('jwlee-test-target_grid-create') if parameter.target_grid == "2.5x2.5": t_grid_cdms2 = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) @@ -110,6 +120,8 @@ def create_sftlf(parameter): sftlf["target_grid"] = sft print('jwlee-test-target_grid, type(sft), sft.shape:', type(sft), sft.shape) + print("jwlee-test_create_sftlf, sftlf[test]['raw']:", sftlf[test]['raw']) + return sftlf @staticmethod diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py index ba14840bb..f0e247fc3 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py @@ -104,8 +104,8 @@ def load_default_regions_and_regions_specs(self): from default_regions.py and stores them as attributes.""" default_regions_file = ( DataSet.load_path_as_file_obj( - # "default_regions.py" - "default_regions_xcdat.py" + "default_regions.py" + # "default_regions_xcdat.py" ) ) exec( diff --git a/pcmdi_metrics/mean_climate/lib/model.py b/pcmdi_metrics/mean_climate/lib/model.py index 850ac872b..8a052d128 100644 --- a/pcmdi_metrics/mean_climate/lib/model.py +++ b/pcmdi_metrics/mean_climate/lib/model.py @@ -44,9 +44,17 @@ def setup_target_mask(self): """Sets the mask and target_mask attribute of self._model_file""" self.var_in_file = self.get_var_in_file() + print('jwlee-test-setup_target_mask, self.var_in_file:', self.var_in_file) + print('jwlee-test-setup_target_mask, self.region:', self.region) + print('jwlee-test-setup_target_mask, self.obs_or_model:', self.obs_or_model) + if self.region is not None: region_value = self.region.get("value", None) + print('jwlee-test-setup_target_mask, region_value:', region_value) if region_value is not None: + print('jwlee-test-setup_target_mask, self.sftlf:', self.sftlf) + print('jwlee-test-setup_target_mask, self.sftlf[self.obs_or_model]:', self.sftlf[self.obs_or_model]) + print('jwlee-test-setup_target_mask, self.sftlf[self.obs_or_model]["raw"]:', self.sftlf[self.obs_or_model]["raw"]) if self.sftlf[self.obs_or_model]["raw"] is None: self.create_sftlf_model_raw(self.var_in_file) diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver_new.py b/pcmdi_metrics/mean_climate/mean_climate_driver_new.py new file mode 100755 index 000000000..8c9c99b23 --- /dev/null +++ b/pcmdi_metrics/mean_climate/mean_climate_driver_new.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python + +import json +import os +from re import split + +import cdms2 +import cdutil +import xcdat + +from pcmdi_metrics import resources +from pcmdi_metrics.io import xcdat_open +from pcmdi_metrics.mean_climate.lib import create_mean_climate_parser + +parser = create_mean_climate_parser() +parameter = parser.get_parameter(cmd_default_vars=False, argparse_vals_only=False) + +""" +dir(parameter): ['__add__', '__class__', '__delattr__', '__dict__', '__dir__', '__doc__', '__eq__', '__format__', '__ge__', '__getattribute__', '__gt__', '__hash__', '__init__', '__init_subclass__', '__le__', '__lt__', '__module__', '__ne__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', '__weakref__', 'case_id', 'check_case_id', 'check_custom_keys', 'check_custom_observations_path', 'check_filename_output_template', 'check_filename_template', 'check_generate_surface_type_land_fraction', 'check_metrics_output_path', 'check_period', 'check_realization', 'check_ref', 'check_reference_data_path', 'check_reference_data_set', 'check_regions', 
'check_regions_specs', 'check_regions_values', 'check_regrid_method', 'check_regrid_method_ocn', 'check_regrid_tool', 'check_regrid_tool_ocn', 'check_save_test_clims', 'check_str', 'check_str_seq_in_str_list', 'check_str_var_in_str_list', 'check_surface_type_land_fraction_filename_template', 'check_target_grid', 'check_test_clims_interpolated_output', 'check_test_data_path', 'check_test_data_set', 'check_values', 'check_vars', 'custom_keys', 'custom_observations_path', 'dry_run', 'filename_output_template', 'filename_template', 'generate_sftlf', 'generate_surface_type_land_fraction', 'import_user_parameter_file_as_module', 'load_parameter_from_py', 'load_parameters_from_module', 'metrics_output_path', 'os', 'output_json_template', 'period', 'r', 'realization', 'reference_data_path', 'reference_data_set', 'regions', 'regions_specs', 'regions_values', 'regrid_method', 'regrid_method_ocn', 'regrid_tool', 'regrid_tool_ocn', 'save_test_clims', 'sftlf_filename_template', 'surface_type_land_fraction_filename_template', 't', 'target_grid', 'test_clims_interpolated_output', 'test_data_path', 'test_data_set', 'user_notes', 'v', 'vars'] +""" + +case_id = parameter.case_id +test_data_set = parameter.test_data_set +vars = parameter.vars +reference_data_set = parameter.reference_data_set +target_grid = parameter.target_grid +regrid_tool = parameter.regrid_tool +regrid_method = parameter.regrid_method +regrid_tool_ocn = parameter.regrid_tool_ocn +save_test_clims = parameter.save_test_clims +test_clims_interpolated_output = parameter.test_clims_interpolated_output +filename_template = parameter.filename_template +sftlf_filename_template = parameter.sftlf_filename_template +generate_sftlf = parameter.generate_sftlf +regions = parameter.regions +test_data_path = parameter.test_data_path +reference_data_path = parameter.reference_data_path +metrics_output_path = parameter.metrics_output_path + +print( + 'case_id: ', case_id, '\n', + 'test_data_set:', test_data_set, '\n', + 'vars:', vars, '\n', + 'reference_data_set:', reference_data_set, '\n', + 'target_grid:', target_grid, '\n', + 'regrid_tool:', regrid_tool, '\n', + 'regrid_method:', regrid_method, '\n', + 'regrid_tool_ocn:', regrid_tool_ocn, '\n', + 'save_test_clims:', save_test_clims, '\n', + 'test_clims_interpolated_output:', test_clims_interpolated_output, '\n', + 'filename_template:', filename_template, '\n', + 'sftlf_filename_template:', sftlf_filename_template, '\n', + 'generate_sftlf:', generate_sftlf, '\n', + 'regions:', regions, '\n', + 'test_data_path:', test_data_path, '\n', + 'reference_data_path:', reference_data_path, '\n', + 'metrics_output_path:', metrics_output_path, '\n') + +default_regions = ['global', 'NHEX', 'SHEX', 'TROPICS'] + +print('--- start mean climate metrics calculation ---') + +# generate target grid +if target_grid == "2.5x2.5": + t_grid = xcdat.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) + t_grid_cdms2 = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) + sft = cdutil.generateLandSeaMask(t_grid_cdms2) + +# load obs catalogue json +egg_pth = resources.resource_path() +obs_file_name = "obs_info_dictionary.json" +obs_file_path = os.path.join(egg_pth, obs_file_name) +with open(obs_file_path) as fo: + obs_dict = json.loads(fo.read()) +# print('obs_dict:', obs_dict) + +# ------------- +# variable loop +# ------------- +for var in vars: + + if '_' in var or '-' in var: + varname = split('_|-', var)[0] + level = float(split('_|-', var)[1]) * 100 # hPa to Pa + else: + varname = var + level = None + + if varname 
not in list(regions.keys()): + regions[varname] = default_regions + + print('varname:', varname) + print('level:', level) + + # ---------------- + # observation loop + # ---------------- + for ref in reference_data_set: + # load data + print('ref:', ref) + ref_dataset_name = obs_dict[varname][ref] + ref_data_full_path = os.path.join( + reference_data_path, + obs_dict[varname][ref_dataset_name]["template"]) + print('ref_data_full_path:', ref_data_full_path) + #ds_ref = xcdat_open(ref_data_full_path, data_var=var) + ds_ref = xcdat_open(ref_data_full_path, data_var=var, decode_times=False) # NOTE: decode_times=False will be removed once obs4MIP written using xcdat + print('ds_ref:', ds_ref) + # regrid + ds_ref_regridded = ds_ref.regridder.horizontal(var, t_grid, tool=regrid_tool) + print('ds_ref_regridded:', ds_ref_regridded) + + + # ---------- + # model loop + # ---------- + for model in test_data_set: + # load data + print('model:', model) + + # regrid + + # ----------- + # region loop + # ----------- + for region in regions[varname]: + print('region:', region) + + if region.split('_')[0] in ['land', 'ocean']: + is_masking = True + else: + is_masking = False + + # write JSON for single model / single obs (need to accumulate later) / single variable + + # write JSON for all models / all obs / single variable diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 883fe4d81..825cd1204 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-66-g0b57dc9' -__git_sha1__ = '0b57dc9553844fb1aef1270115b54a39fb7e80c2' +__git_tag_describe__ = 'v2.3.1-69-g4d44d44' +__git_sha1__ = '4d44d4475f6eae72adf9c606c6ffac0300ded28c' diff --git a/setup.py b/setup.py index 5d53b57a1..739bcc84b 100755 --- a/setup.py +++ b/setup.py @@ -45,6 +45,7 @@ scripts = [ "pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py", "pcmdi_metrics/mean_climate/mean_climate_driver.py", + "pcmdi_metrics/mean_climate/mean_climate_driver_new.py", "pcmdi_metrics/monsoon_wang/scripts/mpindex_compute.py", "pcmdi_metrics/monsoon_sperber/scripts/driver_monsoon_sperber.py", "pcmdi_metrics/mjo/mjo_metrics_driver.py", From 54451729251ff56cec69ecbdba0a487a439c2932 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Wed, 7 Dec 2022 15:29:15 -0800 Subject: [PATCH 059/130] in progress --- .../mean_climate/mean_climate_driver_new.py | 248 ++++++++++-------- 1 file changed, 132 insertions(+), 116 deletions(-) diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver_new.py b/pcmdi_metrics/mean_climate/mean_climate_driver_new.py index 8c9c99b23..f22939646 100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver_new.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver_new.py @@ -12,125 +12,141 @@ from pcmdi_metrics.io import xcdat_open from pcmdi_metrics.mean_climate.lib import create_mean_climate_parser -parser = create_mean_climate_parser() -parameter = parser.get_parameter(cmd_default_vars=False, argparse_vals_only=False) - -""" -dir(parameter): ['__add__', '__class__', '__delattr__', '__dict__', '__dir__', '__doc__', '__eq__', '__format__', '__ge__', '__getattribute__', '__gt__', '__hash__', '__init__', '__init_subclass__', '__le__', '__lt__', '__module__', '__ne__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', '__weakref__', 'case_id', 'check_case_id', 'check_custom_keys', 'check_custom_observations_path', 'check_filename_output_template', 'check_filename_template', 
'check_generate_surface_type_land_fraction', 'check_metrics_output_path', 'check_period', 'check_realization', 'check_ref', 'check_reference_data_path', 'check_reference_data_set', 'check_regions', 'check_regions_specs', 'check_regions_values', 'check_regrid_method', 'check_regrid_method_ocn', 'check_regrid_tool', 'check_regrid_tool_ocn', 'check_save_test_clims', 'check_str', 'check_str_seq_in_str_list', 'check_str_var_in_str_list', 'check_surface_type_land_fraction_filename_template', 'check_target_grid', 'check_test_clims_interpolated_output', 'check_test_data_path', 'check_test_data_set', 'check_values', 'check_vars', 'custom_keys', 'custom_observations_path', 'dry_run', 'filename_output_template', 'filename_template', 'generate_sftlf', 'generate_surface_type_land_fraction', 'import_user_parameter_file_as_module', 'load_parameter_from_py', 'load_parameters_from_module', 'metrics_output_path', 'os', 'output_json_template', 'period', 'r', 'realization', 'reference_data_path', 'reference_data_set', 'regions', 'regions_specs', 'regions_values', 'regrid_method', 'regrid_method_ocn', 'regrid_tool', 'regrid_tool_ocn', 'save_test_clims', 'sftlf_filename_template', 'surface_type_land_fraction_filename_template', 't', 'target_grid', 'test_clims_interpolated_output', 'test_data_path', 'test_data_set', 'user_notes', 'v', 'vars'] -""" - -case_id = parameter.case_id -test_data_set = parameter.test_data_set -vars = parameter.vars -reference_data_set = parameter.reference_data_set -target_grid = parameter.target_grid -regrid_tool = parameter.regrid_tool -regrid_method = parameter.regrid_method -regrid_tool_ocn = parameter.regrid_tool_ocn -save_test_clims = parameter.save_test_clims -test_clims_interpolated_output = parameter.test_clims_interpolated_output -filename_template = parameter.filename_template -sftlf_filename_template = parameter.sftlf_filename_template -generate_sftlf = parameter.generate_sftlf -regions = parameter.regions -test_data_path = parameter.test_data_path -reference_data_path = parameter.reference_data_path -metrics_output_path = parameter.metrics_output_path - -print( - 'case_id: ', case_id, '\n', - 'test_data_set:', test_data_set, '\n', - 'vars:', vars, '\n', - 'reference_data_set:', reference_data_set, '\n', - 'target_grid:', target_grid, '\n', - 'regrid_tool:', regrid_tool, '\n', - 'regrid_method:', regrid_method, '\n', - 'regrid_tool_ocn:', regrid_tool_ocn, '\n', - 'save_test_clims:', save_test_clims, '\n', - 'test_clims_interpolated_output:', test_clims_interpolated_output, '\n', - 'filename_template:', filename_template, '\n', - 'sftlf_filename_template:', sftlf_filename_template, '\n', - 'generate_sftlf:', generate_sftlf, '\n', - 'regions:', regions, '\n', - 'test_data_path:', test_data_path, '\n', - 'reference_data_path:', reference_data_path, '\n', - 'metrics_output_path:', metrics_output_path, '\n') - -default_regions = ['global', 'NHEX', 'SHEX', 'TROPICS'] - -print('--- start mean climate metrics calculation ---') - -# generate target grid -if target_grid == "2.5x2.5": - t_grid = xcdat.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) - t_grid_cdms2 = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) - sft = cdutil.generateLandSeaMask(t_grid_cdms2) - -# load obs catalogue json -egg_pth = resources.resource_path() -obs_file_name = "obs_info_dictionary.json" -obs_file_path = os.path.join(egg_pth, obs_file_name) -with open(obs_file_path) as fo: - obs_dict = json.loads(fo.read()) -# print('obs_dict:', obs_dict) - -# ------------- -# variable loop -# 
------------- -for var in vars: - - if '_' in var or '-' in var: - varname = split('_|-', var)[0] - level = float(split('_|-', var)[1]) * 100 # hPa to Pa - else: - varname = var - level = None - - if varname not in list(regions.keys()): - regions[varname] = default_regions - - print('varname:', varname) - print('level:', level) - - # ---------------- - # observation loop - # ---------------- - for ref in reference_data_set: - # load data - print('ref:', ref) - ref_dataset_name = obs_dict[varname][ref] - ref_data_full_path = os.path.join( - reference_data_path, - obs_dict[varname][ref_dataset_name]["template"]) - print('ref_data_full_path:', ref_data_full_path) - #ds_ref = xcdat_open(ref_data_full_path, data_var=var) - ds_ref = xcdat_open(ref_data_full_path, data_var=var, decode_times=False) # NOTE: decode_times=False will be removed once obs4MIP written using xcdat - print('ds_ref:', ds_ref) - # regrid - ds_ref_regridded = ds_ref.regridder.horizontal(var, t_grid, tool=regrid_tool) - print('ds_ref_regridded:', ds_ref_regridded) - - - # ---------- - # model loop - # ---------- - for model in test_data_set: - # load data - print('model:', model) +def main(): + parser = create_mean_climate_parser() + parameter = parser.get_parameter(cmd_default_vars=False, argparse_vals_only=False) + + # parameters + case_id = parameter.case_id + test_data_set = parameter.test_data_set + vars = parameter.vars + reference_data_set = parameter.reference_data_set + target_grid = parameter.target_grid + regrid_tool = parameter.regrid_tool + regrid_method = parameter.regrid_method + regrid_tool_ocn = parameter.regrid_tool_ocn + save_test_clims = parameter.save_test_clims + test_clims_interpolated_output = parameter.test_clims_interpolated_output + filename_template = parameter.filename_template + sftlf_filename_template = parameter.sftlf_filename_template + generate_sftlf = parameter.generate_sftlf + regions = parameter.regions + test_data_path = parameter.test_data_path + reference_data_path = parameter.reference_data_path + metrics_output_path = parameter.metrics_output_path + + print( + 'case_id: ', case_id, '\n', + 'test_data_set:', test_data_set, '\n', + 'vars:', vars, '\n', + 'reference_data_set:', reference_data_set, '\n', + 'target_grid:', target_grid, '\n', + 'regrid_tool:', regrid_tool, '\n', + 'regrid_method:', regrid_method, '\n', + 'regrid_tool_ocn:', regrid_tool_ocn, '\n', + 'save_test_clims:', save_test_clims, '\n', + 'test_clims_interpolated_output:', test_clims_interpolated_output, '\n', + 'filename_template:', filename_template, '\n', + 'sftlf_filename_template:', sftlf_filename_template, '\n', + 'generate_sftlf:', generate_sftlf, '\n', + 'regions:', regions, '\n', + 'test_data_path:', test_data_path, '\n', + 'reference_data_path:', reference_data_path, '\n', + 'metrics_output_path:', metrics_output_path, '\n') + + default_regions = ['global', 'NHEX', 'SHEX', 'TROPICS'] + + print('--- start mean climate metrics calculation ---') + + # generate target grid + if target_grid == "2.5x2.5": + t_grid = xcdat.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) + print('type(t_grid):', type(t_grid)) + t_grid_cdms2 = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) + sft = cdutil.generateLandSeaMask(t_grid_cdms2) + + # load obs catalogue json + egg_pth = resources.resource_path() + obs_file_name = "obs_info_dictionary.json" + obs_file_path = os.path.join(egg_pth, obs_file_name) + with open(obs_file_path) as fo: + obs_dict = json.loads(fo.read()) + # print('obs_dict:', obs_dict) + + # 
------------- + # variable loop + # ------------- + for var in vars: + + if '_' in var or '-' in var: + varname = split('_|-', var)[0] + level = float(split('_|-', var)[1]) * 100 # hPa to Pa + else: + varname = var + level = None + + if varname not in list(regions.keys()): + regions[varname] = default_regions + + print('varname:', varname) + print('level:', level) + + # ---------------- + # observation loop + # ---------------- + for ref in reference_data_set: + print('ref:', ref) + # identify data to load + ref_dataset_name = obs_dict[varname][ref] + ref_data_full_path = os.path.join( + reference_data_path, + obs_dict[varname][ref_dataset_name]["template"]) + print('ref_data_full_path:', ref_data_full_path) + # load data + ds_ref = xcdat_open(ref_data_full_path, data_var=var, decode_times=False) # NOTE: decode_times=False will be removed once obs4MIP written using xcdat + print('ds_ref:', ds_ref) # regrid + ds_ref_regridded = ds_ref.regridder.horizontal(var, t_grid, tool=regrid_tool) + print('ds_ref_regridded:', ds_ref_regridded) + + # ---------- + # model loop + # ---------- + for model in test_data_set: + # load data + print('model:', model) + + # regrid + + # ----------- + # region loop + # ----------- + for region in regions[varname]: + print('region:', region) + + if region.split('_')[0] in ['land', 'ocean']: + is_masking = True + else: + is_masking = False + + # write JSON for single model / single obs (need to accumulate later) / single variable + + # write JSON for all models / all obs / single variable + + +def load_and_regrid(data_path, var, t_grid): + """_summary_ + + Args: + data_path (str): full data path for nc or xml file + var (str): variable name + t_grid (_type_): _description_ + """ + print('123') - # ----------- - # region loop - # ----------- - for region in regions[varname]: - print('region:', region) - if region.split('_')[0] in ['land', 'ocean']: - is_masking = True - else: - is_masking = False - # write JSON for single model / single obs (need to accumulate later) / single variable - # write JSON for all models / all obs / single variable +if __name__ == "__main__": + main() \ No newline at end of file From 32fe331363217f2c6f9e915dee08aa80885a9d45 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 7 Dec 2022 15:31:19 -0800 Subject: [PATCH 060/130] in progress --- pcmdi_metrics/mean_climate/mean_climate_driver_new.py | 4 ++-- pcmdi_metrics/version.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver_new.py b/pcmdi_metrics/mean_climate/mean_climate_driver_new.py index f22939646..711ff0525 100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver_new.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver_new.py @@ -141,7 +141,7 @@ def load_and_regrid(data_path, var, t_grid): Args: data_path (str): full data path for nc or xml file var (str): variable name - t_grid (_type_): _description_ + t_grid (xarray.core.dataset.Dataset): target grid to regrid """ print('123') @@ -149,4 +149,4 @@ def load_and_regrid(data_path, var, t_grid): if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 825cd1204..8a02a30c5 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-69-g4d44d44' -__git_sha1__ = '4d44d4475f6eae72adf9c606c6ffac0300ded28c' +__git_tag_describe__ = 'v2.3.1-71-g5445172' +__git_sha1__ = 
'54451729251ff56cec69ecbdba0a487a439c2932' From 400f54f135b248bd9f097b1b3ae8296393fe4ff7 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Wed, 7 Dec 2022 15:55:34 -0800 Subject: [PATCH 061/130] in progress --- .../mean_climate/mean_climate_driver_new.py | 27 +++++++++++-------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver_new.py b/pcmdi_metrics/mean_climate/mean_climate_driver_new.py index 711ff0525..faaf4af70 100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver_new.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver_new.py @@ -103,12 +103,8 @@ def main(): reference_data_path, obs_dict[varname][ref_dataset_name]["template"]) print('ref_data_full_path:', ref_data_full_path) - # load data - ds_ref = xcdat_open(ref_data_full_path, data_var=var, decode_times=False) # NOTE: decode_times=False will be removed once obs4MIP written using xcdat - print('ds_ref:', ds_ref) - # regrid - ds_ref_regridded = ds_ref.regridder.horizontal(var, t_grid, tool=regrid_tool) - print('ds_ref_regridded:', ds_ref_regridded) + # load data and regrid + ds_ref = load_and_regrid(ref_data_full_path, var, t_grid, decode_times=False, regrid_tool=regrid_tool, debug=True) # ---------- # model loop @@ -135,17 +131,26 @@ def main(): # write JSON for all models / all obs / single variable -def load_and_regrid(data_path, var, t_grid): - """_summary_ +def load_and_regrid(data_path, var, t_grid, decode_times=True, regrid_tool='regrid2', debug=False): + """Load data and regrid to target grid Args: data_path (str): full data path for nc or xml file var (str): variable name t_grid (xarray.core.dataset.Dataset): target grid to regrid + decode_times (bool): Default is True. decode_times=False will be removed once obs4MIP written using xcdat + regrid_tool (str): Name of the regridding tool. See https://xcdat.readthedocs.io/en/stable/generated/xarray.Dataset.regridder.horizontal.html for more info + debug (bool): Default is False. 
If True, print more info to help debugging process """ - print('123') - - + # load data + ds = xcdat_open(data_path, data_var=var, decode_times=decode_times) # NOTE: decode_times=False will be removed once obs4MIP written using xcdat + if debug: + print('ds_ref:', ds) + # regrid + ds_regridded = ds.regridder.horizontal(var, t_grid, tool=regrid_tool) + if debug: + print('ds_regridded:', ds_regridded) + return ds_regridded if __name__ == "__main__": From 4de328548cb6cc6cf1635c9f1e663112e6f49104 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 7 Dec 2022 20:38:40 -0800 Subject: [PATCH 062/130] in progress --- .../mean_climate/mean_climate_driver_new.py | 67 +++++++++++++++---- pcmdi_metrics/version.py | 4 +- 2 files changed, 55 insertions(+), 16 deletions(-) diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver_new.py b/pcmdi_metrics/mean_climate/mean_climate_driver_new.py index faaf4af70..ba0e606bf 100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver_new.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver_new.py @@ -10,7 +10,10 @@ from pcmdi_metrics import resources from pcmdi_metrics.io import xcdat_open +from pcmdi_metrics.io import load_regions_specs, region_subset from pcmdi_metrics.mean_climate.lib import create_mean_climate_parser +from pcmdi_metrics.mean_climate.lib import compute_metrics +from pcmdi_metrics.variability_mode.lib import tree def main(): @@ -36,6 +39,8 @@ def main(): reference_data_path = parameter.reference_data_path metrics_output_path = parameter.metrics_output_path + debug = True + print( 'case_id: ', case_id, '\n', 'test_data_set:', test_data_set, '\n', @@ -55,9 +60,10 @@ def main(): 'reference_data_path:', reference_data_path, '\n', 'metrics_output_path:', metrics_output_path, '\n') - default_regions = ['global', 'NHEX', 'SHEX', 'TROPICS'] + print('--- prepare mean climate metrics calculation ---') - print('--- start mean climate metrics calculation ---') + regions_specs = load_regions_specs() + default_regions = ['global', 'NHEX', 'SHEX', 'TROPICS'] # generate target grid if target_grid == "2.5x2.5": @@ -72,7 +78,13 @@ def main(): obs_file_path = os.path.join(egg_pth, obs_file_name) with open(obs_file_path) as fo: obs_dict = json.loads(fo.read()) - # print('obs_dict:', obs_dict) + if debug: + print('obs_dict:', json.dumps(obs_dict, indent=4, sort_keys=True)) + + # set dictionary for .json record + result_dict = tree() + + print('--- start mean climate metrics calculation ---') # ------------- # variable loop @@ -81,7 +93,7 @@ def main(): if '_' in var or '-' in var: varname = split('_|-', var)[0] - level = float(split('_|-', var)[1]) * 100 # hPa to Pa + level = float(split('_|-', var)[1]) else: varname = var level = None @@ -97,23 +109,28 @@ def main(): # ---------------- for ref in reference_data_set: print('ref:', ref) - # identify data to load + # identify data to load (annual cycle (AC) data is loading in) ref_dataset_name = obs_dict[varname][ref] ref_data_full_path = os.path.join( reference_data_path, obs_dict[varname][ref_dataset_name]["template"]) print('ref_data_full_path:', ref_data_full_path) # load data and regrid - ds_ref = load_and_regrid(ref_data_full_path, var, t_grid, decode_times=False, regrid_tool=regrid_tool, debug=True) + ds_ref = load_and_regrid(ref_data_full_path, varname, level, t_grid, decode_times=False, regrid_tool=regrid_tool, debug=debug) + ds_ref_dict = dict() # ---------- # model loop # ---------- for model in test_data_set: - # load data print('model:', model) - - # regrid + ds_model_dict = dict() + # identify data 
to load (annual cycle (AC) data is loading in) + model_data_full_path = os.path.join( + test_data_path, + filename_template.replace('%(variable)', varname).replace('%(model)', model)) + # load data and regrid + ds_model = load_and_regrid(model_data_full_path, varname, level, t_grid, regrid_tool=regrid_tool, debug=debug) # ----------- # region loop @@ -121,37 +138,59 @@ def main(): for region in regions[varname]: print('region:', region) + # land/sea mask if region.split('_')[0] in ['land', 'ocean']: is_masking = True else: is_masking = False + # spatial subset + if region.lower() in ['global', 'land', 'ocean']: + ds_model_dict[region] = ds_model + if region not in list(ds_ref_dict.keys()): + ds_ref_dict[region] = ds_ref + else: + ds_model_dict[region] = region_subset(ds_model, region_specs, region=region) + if region not in list(ds_ref_dict.keys()): + ds_ref_dict[region] = region_subset(ds_ref, region_specs, region=region) + + # compute metrics + result_dict["RESULTS"][model][ref][region] = compute_metrics(varname, ds_model_dict[region], ds_ref_dict[region]) + # write JSON for single model / single obs (need to accumulate later) / single variable + print('result_dict:', result_dict) # write JSON for all models / all obs / single variable + if debug: + print('result_dict:', json.dumps(result_dict, indent=4, sort_keys=True)) -def load_and_regrid(data_path, var, t_grid, decode_times=True, regrid_tool='regrid2', debug=False): +def load_and_regrid(data_path, varname, level=None, t_grid=None, decode_times=True, regrid_tool='regrid2', debug=False): """Load data and regrid to target grid Args: data_path (str): full data path for nc or xml file - var (str): variable name + varname (str): variable name + level (float): level to extract (unit in hPa) t_grid (xarray.core.dataset.Dataset): target grid to regrid decode_times (bool): Default is True. decode_times=False will be removed once obs4MIP written using xcdat regrid_tool (str): Name of the regridding tool. See https://xcdat.readthedocs.io/en/stable/generated/xarray.Dataset.regridder.horizontal.html for more info debug (bool): Default is False. 
If True, print more info to help debugging process """ # load data - ds = xcdat_open(data_path, data_var=var, decode_times=decode_times) # NOTE: decode_times=False will be removed once obs4MIP written using xcdat + ds = xcdat_open(data_path, data_var=varname, decode_times=decode_times) # NOTE: decode_times=False will be removed once obs4MIP written using xcdat + if level is not None: + level = level * 100 # hPa to Pa + ds = ds.sel(plev=level) if debug: - print('ds_ref:', ds) + print('ds:', ds) # regrid - ds_regridded = ds.regridder.horizontal(var, t_grid, tool=regrid_tool) + ds_regridded = ds.regridder.horizontal(varname, t_grid, tool=regrid_tool) if debug: print('ds_regridded:', ds_regridded) return ds_regridded + if __name__ == "__main__": main() diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 8a02a30c5..1bde6ee0c 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-71-g5445172' -__git_sha1__ = '54451729251ff56cec69ecbdba0a487a439c2932' +__git_tag_describe__ = 'v2.3.1-73-g400f54f' +__git_sha1__ = '400f54f135b248bd9f097b1b3ae8296393fe4ff7' From e8b8a29293daacacec78ac1baee2f4c2f4cd3442 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 8 Dec 2022 16:17:06 -0800 Subject: [PATCH 063/130] clean up --- .../mean_climate/mean_climate_driver_new.py | 188 +++++++++++++----- 1 file changed, 141 insertions(+), 47 deletions(-) diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver_new.py b/pcmdi_metrics/mean_climate/mean_climate_driver_new.py index ba0e606bf..5897b4051 100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver_new.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver_new.py @@ -2,17 +2,18 @@ import json import os +from copy import deepcopy from re import split import cdms2 import cdutil +import numpy as np import xcdat from pcmdi_metrics import resources -from pcmdi_metrics.io import xcdat_open -from pcmdi_metrics.io import load_regions_specs, region_subset -from pcmdi_metrics.mean_climate.lib import create_mean_climate_parser -from pcmdi_metrics.mean_climate.lib import compute_metrics +from pcmdi_metrics.io import load_regions_specs, region_subset, xcdat_open +from pcmdi_metrics.io.base import Base +from pcmdi_metrics.mean_climate.lib import compute_metrics, create_mean_climate_parser from pcmdi_metrics.variability_mode.lib import tree @@ -23,6 +24,7 @@ def main(): # parameters case_id = parameter.case_id test_data_set = parameter.test_data_set + realization = parameter.realization vars = parameter.vars reference_data_set = parameter.reference_data_set target_grid = parameter.target_grid @@ -37,13 +39,21 @@ def main(): regions = parameter.regions test_data_path = parameter.test_data_path reference_data_path = parameter.reference_data_path - metrics_output_path = parameter.metrics_output_path + metrics_output_path = parameter.metrics_output_path.replace('%(case_id)', case_id) + + cmec = False # temporary + + if realization is None: + realization = "" + elif isinstance(realization, str): + realization = [realization] debug = True print( 'case_id: ', case_id, '\n', 'test_data_set:', test_data_set, '\n', + 'realization:', realization, '\n', 'vars:', vars, '\n', 'reference_data_set:', reference_data_set, '\n', 'target_grid:', target_grid, '\n', @@ -67,10 +77,23 @@ def main(): # generate target grid if target_grid == "2.5x2.5": + # target grid for regridding t_grid = xcdat.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) - print('type(t_grid):', type(t_grid)) + if 
debug: + print('type(t_grid):', type(t_grid)) # Expected type is 'xarray.core.dataset.Dataset' + print('t_grid:', t_grid) + # identical target grid in cdms2 to use generateLandSeaMask function that is yet to exist in xcdat t_grid_cdms2 = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) + # generate land sea mask for the target grid sft = cdutil.generateLandSeaMask(t_grid_cdms2) + if debug: + print('sft:', sft) + print('sft.getAxisList():', sft.getAxisList()) + # add sft to target grid dataset + t_grid['sftlf'] = (['lat', 'lon'], np.array(sft)) + if debug: + print('t_grid (after sftlf added):', t_grid) + t_grid.to_netcdf('target_grid.nc') # load obs catalogue json egg_pth = resources.resource_path() @@ -78,8 +101,8 @@ def main(): obs_file_path = os.path.join(egg_pth, obs_file_name) with open(obs_file_path) as fo: obs_dict = json.loads(fo.read()) - if debug: - print('obs_dict:', json.dumps(obs_dict, indent=4, sort_keys=True)) + # if debug: + # print('obs_dict:', json.dumps(obs_dict, indent=4, sort_keys=True)) # set dictionary for .json record result_dict = tree() @@ -124,45 +147,76 @@ def main(): # ---------- for model in test_data_set: print('model:', model) - ds_model_dict = dict() - # identify data to load (annual cycle (AC) data is loading in) - model_data_full_path = os.path.join( - test_data_path, - filename_template.replace('%(variable)', varname).replace('%(model)', model)) - # load data and regrid - ds_model = load_and_regrid(model_data_full_path, varname, level, t_grid, regrid_tool=regrid_tool, debug=debug) - - # ----------- - # region loop - # ----------- - for region in regions[varname]: - print('region:', region) - - # land/sea mask - if region.split('_')[0] in ['land', 'ocean']: - is_masking = True - else: - is_masking = False - - # spatial subset - if region.lower() in ['global', 'land', 'ocean']: - ds_model_dict[region] = ds_model - if region not in list(ds_ref_dict.keys()): - ds_ref_dict[region] = ds_ref - else: - ds_model_dict[region] = region_subset(ds_model, region_specs, region=region) - if region not in list(ds_ref_dict.keys()): - ds_ref_dict[region] = region_subset(ds_ref, region_specs, region=region) - - # compute metrics - result_dict["RESULTS"][model][ref][region] = compute_metrics(varname, ds_model_dict[region], ds_ref_dict[region]) - - # write JSON for single model / single obs (need to accumulate later) / single variable - print('result_dict:', result_dict) - - # write JSON for all models / all obs / single variable - if debug: - print('result_dict:', json.dumps(result_dict, indent=4, sort_keys=True)) + for run in realization: + ds_test_dict = dict() + # identify data to load (annual cycle (AC) data is loading in) + test_data_full_path = os.path.join( + test_data_path, + filename_template.replace('%(variable)', varname).replace('%(model)', model).replace('%(realization)', run)) + # load data and regrid + ds_test = load_and_regrid(test_data_full_path, varname, level, t_grid, regrid_tool=regrid_tool, debug=debug) + + # ----------- + # region loop + # ----------- + for region in regions[varname]: + print('region:', region) + + # land/sea mask -- conduct masking only for variable data array, not entire data + if region.split('_')[0] in ['land', 'ocean']: + surface_type = region.split('_')[0] + ds_test_tmp = ds_test.copy(deep=True) + ds_ref_tmp = ds_ref.copy(deep=True) + if surface_type == 'land': + ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] != 0.) + ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] != 0.) 
+ elif surface_type == 'ocean': + ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] == 0.) + ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] == 0.) + else: + ds_test_tmp = ds_test + ds_ref_tmp = ds_ref + print('mask done') + + # spatial subset + if region.lower() in ['global', 'land', 'ocean']: + ds_test_dict[region] = ds_test_tmp + if region not in list(ds_ref_dict.keys()): + ds_ref_dict[region] = ds_ref_tmp + else: + ds_test_tmp = region_subset(ds_test_tmp, regions_specs, region=region) + ds_test_dict[region] = ds_test_tmp + if region not in list(ds_ref_dict.keys()): + ds_ref_dict[region] = region_subset(ds_ref_tmp, regions_specs, region=region) + print('spatial subset done') + + if debug: + print('ds_test_tmp:', ds_test_tmp) + ds_test_tmp.to_netcdf('_'.join([var, 'model', region + '.nc'])) + + # compute metrics + print('compute metrics start') + result_dict["RESULTS"][model][ref][run][region] = compute_metrics(varname, ds_test_dict[region], ds_ref_dict[region]) + + # write individual JSON for single model (multi realizations if exist) / single obs (need to accumulate later) / single variable + json_filename_tmp = "_".join([model, var, target_grid, regrid_tool, regrid_method, "metrics"]) + mean_climate_metrics_to_json( + os.path.join(metrics_output_path, var), + json_filename_tmp, + result_dict, + model=model, + run=run, + cmec_flag=cmec, + ) + + # write collective JSON for all models / all obs / single variable + json_filename = "_".join([var, target_grid, regrid_tool, regrid_method, "metrics"]) + mean_climate_metrics_to_json( + metrics_output_path, + json_filename, + result_dict, + cmec_flag=cmec, + ) def load_and_regrid(data_path, varname, level=None, t_grid=None, decode_times=True, regrid_tool='regrid2', debug=False): @@ -191,6 +245,46 @@ def load_and_regrid(data_path, varname, level=None, t_grid=None, decode_times=Tr return ds_regridded +def mean_climate_metrics_to_json( + outdir, json_filename, result_dict, model=None, run=None, cmec_flag=False +): + # Open JSON + JSON = Base( + outdir, json_filename + ) + # Dict for JSON + json_dict = deepcopy(result_dict) + if model is not None or run is not None: + # Preserve only needed dict branch -- delete rest keys + models_in_dict = list(json_dict["RESULTS"].keys()) + for m in models_in_dict: + if m == model: + runs_in_model_dict = list(json_dict["RESULTS"][m].keys()) + for r in runs_in_model_dict: + if r != run and run is not None: + del json_dict["RESULTS"][m][r] + else: + del json_dict["RESULTS"][m] + # Write selected dict to JSON + JSON.write( + json_dict, + json_structure=[ + "model", + "reference", + "rip", + "region", + "statistic", + "season", + ], + indent=4, + separators=(",", ": "), + mode="r+", + sort_keys=True, + ) + if cmec_flag: + print("Writing cmec file") + JSON.write_cmec(indent=4, separators=(",", ": ")) + if __name__ == "__main__": main() From 5cabf1fc42ef545ccbc0eb6b4dfd119192589049 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 8 Dec 2022 17:00:10 -0800 Subject: [PATCH 064/130] remove old files no longer in use --- .../{ => deprecated}/lib/dataset.py | 0 .../mean_climate/{ => deprecated}/lib/io.py | 0 .../{ => deprecated}/lib/model.py | 0 .../{ => deprecated}/lib/observation.py | 0 .../{ => deprecated}/lib/outputmetrics.py | 0 .../deprecated/mean_climate_driver.py | 7 + pcmdi_metrics/mean_climate/lib/__init__.py | 14 +- ...ics_calculations.py => compute_metrics.py} | 43 +- .../mean_climate/lib/compute_statistics.py | 3 - .../lib/create_mean_climate_parser.py | 232 ++++++++ 
.../lib/mean_climate_metrics_driver.py | 498 ------------------ .../mean_climate/mean_climate_driver.py | 229 +++++++- .../mean_climate/mean_climate_driver_new.py | 290 ---------- pcmdi_metrics/version.py | 4 +- setup.py | 1 - 15 files changed, 484 insertions(+), 837 deletions(-) rename pcmdi_metrics/mean_climate/{ => deprecated}/lib/dataset.py (100%) rename pcmdi_metrics/mean_climate/{ => deprecated}/lib/io.py (100%) rename pcmdi_metrics/mean_climate/{ => deprecated}/lib/model.py (100%) rename pcmdi_metrics/mean_climate/{ => deprecated}/lib/observation.py (100%) rename pcmdi_metrics/mean_climate/{ => deprecated}/lib/outputmetrics.py (100%) create mode 100755 pcmdi_metrics/mean_climate/deprecated/mean_climate_driver.py rename pcmdi_metrics/mean_climate/lib/{mean_climate_metrics_calculations.py => compute_metrics.py} (86%) create mode 100644 pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py delete mode 100644 pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py delete mode 100755 pcmdi_metrics/mean_climate/mean_climate_driver_new.py diff --git a/pcmdi_metrics/mean_climate/lib/dataset.py b/pcmdi_metrics/mean_climate/deprecated/lib/dataset.py similarity index 100% rename from pcmdi_metrics/mean_climate/lib/dataset.py rename to pcmdi_metrics/mean_climate/deprecated/lib/dataset.py diff --git a/pcmdi_metrics/mean_climate/lib/io.py b/pcmdi_metrics/mean_climate/deprecated/lib/io.py similarity index 100% rename from pcmdi_metrics/mean_climate/lib/io.py rename to pcmdi_metrics/mean_climate/deprecated/lib/io.py diff --git a/pcmdi_metrics/mean_climate/lib/model.py b/pcmdi_metrics/mean_climate/deprecated/lib/model.py similarity index 100% rename from pcmdi_metrics/mean_climate/lib/model.py rename to pcmdi_metrics/mean_climate/deprecated/lib/model.py diff --git a/pcmdi_metrics/mean_climate/lib/observation.py b/pcmdi_metrics/mean_climate/deprecated/lib/observation.py similarity index 100% rename from pcmdi_metrics/mean_climate/lib/observation.py rename to pcmdi_metrics/mean_climate/deprecated/lib/observation.py diff --git a/pcmdi_metrics/mean_climate/lib/outputmetrics.py b/pcmdi_metrics/mean_climate/deprecated/lib/outputmetrics.py similarity index 100% rename from pcmdi_metrics/mean_climate/lib/outputmetrics.py rename to pcmdi_metrics/mean_climate/deprecated/lib/outputmetrics.py diff --git a/pcmdi_metrics/mean_climate/deprecated/mean_climate_driver.py b/pcmdi_metrics/mean_climate/deprecated/mean_climate_driver.py new file mode 100755 index 000000000..5429360d5 --- /dev/null +++ b/pcmdi_metrics/mean_climate/deprecated/mean_climate_driver.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python +from pcmdi_metrics.mean_climate.lib import PMPDriver, create_mean_climate_parser + +parser = create_mean_climate_parser() +parameter = parser.get_parameter(cmd_default_vars=False, argparse_vals_only=False) +driver = PMPDriver(parameter) +driver.run_diags() diff --git a/pcmdi_metrics/mean_climate/lib/__init__.py b/pcmdi_metrics/mean_climate/lib/__init__.py index 5d395052c..e9f0a8713 100644 --- a/pcmdi_metrics/mean_climate/lib/__init__.py +++ b/pcmdi_metrics/mean_climate/lib/__init__.py @@ -1,3 +1,4 @@ +from .compute_metrics import compute_metrics # noqa from .compute_statistics import ( # noqa annual_mean, bias_xy, @@ -13,13 +14,6 @@ std_xyt, zonal_mean, ) -from .mean_climate_metrics_calculations import compute_metrics # noqa -from .mean_climate_metrics_driver import PMPDriver, create_mean_climate_parser # noqa - -from . import dataset # DataSet # noqa # isort:skip -from . import io # noqa # isort:skip -from . 
import model # Model # noqa # isort:skip -from . import observation # OBS, Observation # noqa # isort:skip -from . import outputmetrics # OutputMetrics # noqa # isort:skip -from . import pmp_parameter # PMPParameter, PMPMetricsParameter # noqa # isort:skip -from . import pmp_parser # PMPParser, PMPMetricsParser # noqa # isort:skip +from .create_mean_climate_parser import create_mean_climate_parser # noqa +from .load_and_regrid import load_and_regrid # noqa +from .mean_climate_metrics_to_json import mean_climate_metrics_to_json # noqa diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py b/pcmdi_metrics/mean_climate/lib/compute_metrics.py similarity index 86% rename from pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py rename to pcmdi_metrics/mean_climate/lib/compute_metrics.py index 1c7528ab2..e662f10eb 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_calculations.py +++ b/pcmdi_metrics/mean_climate/lib/compute_metrics.py @@ -39,12 +39,6 @@ def compute_metrics(Var, dm, do): dm.to_netcdf('dm.nc') do.to_netcdf('do.nc') - """ - print('jwlee-test-check-calendar') - print("dm.time.encoding['calendar']: ", dm.time.encoding['calendar']) - do.time.encoding['calendar'] = dm.time.encoding['calendar'] - print("do.time.encoding['calendar']: ", do.time.encoding['calendar']) - """ metrics_dictionary = {} # SET CONDITIONAL ON INPUT VARIABLE @@ -59,63 +53,56 @@ def compute_metrics(Var, dm, do): sig_digits = ".3f" # CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD - print('jwlee-test-compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD') - print('jwlee-test-compute_metrics, rms_xyt') + print('compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD') + print('compute_metrics, rms_xyt') rms_xyt = pcmdi_metrics.mean_climate.lib.rms_xyt(dm, do, var) - print('jwlee-test-compute_metrics, stdObs_xyt') + print('compute_metrics, stdObs_xyt') stdObs_xyt = pcmdi_metrics.mean_climate.lib.std_xyt(do, var) - print('jwlee-test-compute_metrics, std_xyt') + print('compute_metrics, std_xyt') std_xyt = pcmdi_metrics.mean_climate.lib.std_xyt(dm, var) # CALCULATE ANNUAL MEANS - print('jwlee-test-compute_metrics-CALCULATE ANNUAL MEANS') + print('compute_metrics-CALCULATE ANNUAL MEANS') dm_am, do_am = pcmdi_metrics.mean_climate.lib.annual_mean(dm, do, var) # CALCULATE ANNUAL MEAN BIAS - print('jwlee-test-compute_metrics-CALCULATE ANNUAL MEAN BIAS') + print('compute_metrics-CALCULATE ANNUAL MEAN BIAS') bias_xy = pcmdi_metrics.mean_climate.lib.bias_xy(dm_am, do_am, var) # CALCULATE MEAN ABSOLUTE ERROR - print('jwlee-test-compute_metrics-CALCULATE MSE') + print('compute_metrics-CALCULATE MSE') mae_xy = pcmdi_metrics.mean_climate.lib.meanabs_xy(dm_am, do_am, var) # CALCULATE ANNUAL MEAN RMS (centered and uncentered) - print('jwlee-test-compute_metrics-CALCULATE MEAN RMS') + print('compute_metrics-CALCULATE MEAN RMS') rms_xy = pcmdi_metrics.mean_climate.lib.rms_xy(dm_am, do_am, var) rmsc_xy = pcmdi_metrics.mean_climate.lib.rmsc_xy(dm_am, do_am, var) # CALCULATE ANNUAL MEAN CORRELATION - print('jwlee-test-compute_metrics-CALCULATE MEAN CORR') + print('compute_metrics-CALCULATE MEAN CORR') cor_xy = pcmdi_metrics.mean_climate.lib.cor_xy(dm_am, do_am, var) # CALCULATE ANNUAL OBS and MOD STD - print('jwlee-test-compute_metrics-CALCULATE ANNUAL OBS AND MOD STD') + print('compute_metrics-CALCULATE ANNUAL OBS AND MOD STD') stdObs_xy = pcmdi_metrics.mean_climate.lib.std_xy(do_am, var) std_xy = 
pcmdi_metrics.mean_climate.lib.std_xy(dm_am, var) # CALCULATE ANNUAL OBS and MOD MEAN - print('jwlee-test-compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN') + print('compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN') meanObs_xy = pcmdi_metrics.mean_climate.lib.mean_xy(do_am, var) mean_xy = pcmdi_metrics.mean_climate.lib.mean_xy(dm_am, var) # ZONAL MEANS ###### # CALCULATE ANNUAL MEANS - print('jwlee-test-compute_metrics-CALCULATE ANNUAL MEANS') + print('compute_metrics-CALCULATE ANNUAL MEANS') dm_amzm, do_amzm = pcmdi_metrics.mean_climate.lib.zonal_mean(dm_am, do_am, var) # CALCULATE ANNUAL AND ZONAL MEAN RMS - print('jwlee-test-compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS') + print('compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS') rms_y = pcmdi_metrics.mean_climate.lib.rms_0(dm_amzm, do_amzm, var) # CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS - print('jwlee-test-compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS') - """ - dm_amzm_grown, dummy = grower(dm_amzm, dm_am, var) - dm_am_devzm = MV2.subtract(dm_am, dm_amzm_grown, var) - do_amzm_grown, dummy = grower(do_amzm, do_am, var) - do_am_devzm = MV2.subtract(do_am, do_amzm_grown, var) - rms_xy_devzm = pcmdi_metrics.mean_climate.lib.rms_xy(dm_am_devzm, do_am_devzm, var) - """ + print('compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS') dm_am_devzm = dm_am - dm_amzm do_am_devzm = do_am - do_amzm rms_xy_devzm = pcmdi_metrics.mean_climate.lib.rms_xy(dm_am_devzm, do_am_devzm, var) @@ -123,7 +110,7 @@ def compute_metrics(Var, dm, do): # CALCULATE ANNUAL AND ZONAL MEAN STD # CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD - print('jwlee-test-compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD') + print('compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD') stdObs_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(do_am_devzm, var) std_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(dm_am_devzm, var) diff --git a/pcmdi_metrics/mean_climate/lib/compute_statistics.py b/pcmdi_metrics/mean_climate/lib/compute_statistics.py index 850e3263b..09fec66bb 100644 --- a/pcmdi_metrics/mean_climate/lib/compute_statistics.py +++ b/pcmdi_metrics/mean_climate/lib/compute_statistics.py @@ -192,12 +192,9 @@ def rms_xyt(dm, do, var=None): "Contact": "pcmdi-metrics@llnl.gov", } ds = dm.copy(deep=True) - print('jwlee-test-rms_xyt-1') ds['diff_square'] = (dm[var] - do[var])**2 ds['diff_square_sqrt'] = np.sqrt(ds.spatial.average('diff_square', axis=['X', 'Y'])['diff_square']) - print('jwlee-test-rms_xyt-2') stat = ds.temporal.average('diff_square_sqrt')['diff_square_sqrt'].values - print('jwlee-test-rms_xyt-3') return float(stat) diff --git a/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py b/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py new file mode 100644 index 000000000..ace10707f --- /dev/null +++ b/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py @@ -0,0 +1,232 @@ +#!/usr/bin/env python +import ast + +from pcmdi_metrics.mean_climate.lib import pmp_parser + + +def create_mean_climate_parser(): + parser = pmp_parser.PMPMetricsParser() + parser.add_argument( + "--case_id", + dest="case_id", + help="Defines a subdirectory to the metrics output, so multiple" + + "cases can be compared", + required=False, + ) + + parser.add_argument( + "-v", + "--vars", + type=str, + nargs="+", + dest="vars", + help="Variables to use", + required=False, + ) + + parser.add_argument( + "--regions", + type=ast.literal_eval, + dest="regions", + help="Regions on 
which to run the metrics", + required=False, + ) + + parser.add_argument( + "--regions_values", + type=ast.literal_eval, + dest="regions_values", + help="Users can customize regions values names", + required=False, + ) + + parser.add_argument( + "-r", + "--reference_data_set", + type=str, + nargs="+", + dest="reference_data_set", + help="List of observations or models that are used as a " + + "reference against the test_data_set", + required=False, + ) + + parser.add_argument( + "--reference_data_path", + dest="reference_data_path", + help="Path for the reference climitologies", + required=False, + ) + + parser.add_argument( + "-t", + "--test_data_set", + type=str, + nargs="+", + dest="test_data_set", + help="List of observations or models to test " + + "against the reference_data_set", + required=False, + ) + + parser.add_argument( + "--test_data_path", + dest="test_data_path", + help="Path for the test climitologies", + required=False, + ) + + parser.add_argument( + "--target_grid", + dest="target_grid", + help='Options are "2.5x2.5" or an actual cdms2 grid object', + required=False, + ) + + parser.add_argument( + "--regrid_tool", + dest="regrid_tool", + help='Options are "regrid2" or "esmf"', + required=False, + ) + + parser.add_argument( + "--regrid_method", + dest="regrid_method", + help='Options are "linear" or "conservative", ' + + 'only if regrid_tool is "esmf"', + required=False, + ) + + parser.add_argument( + "--regrid_tool_ocn", + dest="regrid_tool_ocn", + help='Options are "regrid2" or "esmf"', + required=False, + ) + + parser.add_argument( + "--regrid_method_ocn", + dest="regrid_method_ocn", + help='Options are "linear" or "conservative", ' + + 'only if regrid_tool is "esmf"', + required=False, + ) + + parser.add_argument( + "--period", dest="period", help="A simulation parameter", required=False + ) + + parser.add_argument( + "--realization", + dest="realization", + help="A simulation parameter", + required=False, + ) + + parser.add_argument( + "--simulation_description_mapping", + type=ast.literal_eval, + dest="simulation_description_mapping", + help="List of observations or models to test " + + "against the reference_data_set", + default={}, + required=False, + ) + + parser.add_argument( + "--ext", dest="ext", help="Extension for the output files?", required=False + ) + + parser.add_argument( + "--dry_run", + # If input is 'True' or 'true', return True. Otherwise False. + type=lambda x: x.lower() == "true", + dest="dry_run", + help="True if output is to be created, False otherwise", + required=False, + ) + + parser.add_argument( + "--filename_template", + dest="filename_template", + help="Template for climatology files", + required=False, + ) + + parser.add_argument( + "--sftlf_filename_template", + dest="sftlf_filename_template", + help='Filename template for landsea masks ("sftlf")', + required=False, + ) + + parser.add_argument( + "--custom_observations", + dest="custom_observations", + help="Path to an alternative, custom observation file", + required=False, + ) + + parser.add_argument( + "--metrics_output_path", + dest="metrics_output_path", + help="Directory of where to put the results", + required=False, + ) + + parser.add_argument( + "--filename_output_template", + dest="filename_output_template", + help="Filename for the interpolated test climatologies", + required=False, + ) + + parser.add_argument( + "--save_test_clims", + # If input is 'True' or 'true', return True. Otherwise False. 
+ type=lambda x: x.lower() == "true", + dest="save_test_clims", + help="True if to save interpolated test climatologies," + " otherwise False", + required=False, + ) + + parser.add_argument( + "--test_clims_interpolated_output", + dest="test_clims_interpolated_output", + help="Directory of where to put the interpolated " + "test climatologies", + required=False, + ) + + parser.add_argument( + "--output_json_template", + help="Filename template for results json files", + required=False, + ) + + parser.add_argument( + "--user_notes", + dest="user_notes", + help="Provide a short description to help identify this run of the PMP mean climate.", + required=False, + ) + + parser.add_argument( + "--cmec", + dest="cmec", + action="store_true", + help="Save metrics in CMEC format", + default=False, + required=False, + ) + + parser.add_argument( + "--no_cmec", + dest="cmec", + action="store_false", + help="Option to not save metrics in CMEC format", + default=False, + required=False, + ) + + return parser diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py deleted file mode 100644 index f0e247fc3..000000000 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_driver.py +++ /dev/null @@ -1,498 +0,0 @@ -#!/usr/bin/env python -import ast -import json -import logging - -from pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.mean_climate.lib import pmp_parser -from pcmdi_metrics.mean_climate.lib.dataset import DataSet -from pcmdi_metrics.mean_climate.lib.model import Model -from pcmdi_metrics.mean_climate.lib.observation import Observation -from pcmdi_metrics.mean_climate.lib.outputmetrics import OutputMetrics - - -class PMPDriver(object): - def __init__(self, parameter): - plog = logging.getLogger("pcmdi_metrics") - plog.setLevel(LOG_LEVEL) - # create file handler which logs messages - formatter = logging.Formatter( - "%%(levelname)s::%%(asctime)s::%%(name)s::%s:: %%(message)s" - % (parameter.case_id), - datefmt="%Y-%m-%d %H:%M", - ) - for h in plog.handlers: - h.setFormatter(formatter) - - fh = logging.FileHandler("pcmdi_metrics_driver.%s.log" % (parameter.case_id)) - fh.setLevel(LOG_LEVEL) - formatter = logging.Formatter( - "%(levelname)s::%(asctime)s:: %(message)s", datefmt="%Y-%m-%d %H:%M" - ) - fh.setFormatter(formatter) - plog.addHandler(fh) - self.parameter = parameter - self.obs_dict = {} - self.regions_dict = {} - self.var = "" - self.output_metric = None - self.region = "" - self.sftlf = DataSet.create_sftlf(self.parameter) - self.default_regions = [] - self.regions_specs = {} - - def __call__(self): - self.run_diags() - - def run_diags(self): - """Runs the diagnostics. 
What did you think it did?""" - self.obs_dict = self.load_obs_dict() - self.regions_dict = self.create_regions_dict() - - for self.var_name_long in self.parameter.vars: - self.var = self.var_name_long.split("_")[0] - - if self.var not in self.obs_dict: - logging.getLogger("pcmdi_metrics").error( - "Variable %s not in obs_dict" % self.var - ) - continue - - for region in self.regions_dict[self.var]: - logging.getLogger("pcmdi_metrics").info("REGION: {}".format(region)) - self.region = self.create_region(region) - self.run_reference_and_test_comparison() - - def load_obs_dict(self): - """Loads obs_info_dictionary.json and appends - custom_observations from the parameter file if needed.""" - obs_file_name = "obs_info_dictionary.json" - obs_json_file = DataSet.load_path_as_file_obj( - obs_file_name - ) - obs_dict = json.loads(obs_json_file.read()) - obs_json_file.close() - - if hasattr(self.parameter, "custom_observations"): - # Can't use load_path_as_file_obj() b/c might not be in /share/ - cust_obs_json_file = open(self.parameter.custom_observations) - obs_dict.update(json.load(cust_obs_json_file)) - cust_obs_json_file.close() - return obs_dict - - def create_regions_dict(self): - """Creates a dict from self.default_regions.""" - self.load_default_regions_and_regions_specs() - - regions_dict = {} - for var_name_long in self.parameter.vars: - var = var_name_long.split("_")[0] - regions = self.parameter.regions - region = regions.get(var, self.default_regions) - if not isinstance(region, (list, tuple)): - region = [region] - if None in region: - region.remove(None) - for r in self.default_regions: - region.insert(0, r) - regions_dict[var] = region - - return regions_dict - - def load_default_regions_and_regions_specs(self): - """Gets the default_regions dict and regions_specs dict - from default_regions.py and stores them as attributes.""" - default_regions_file = ( - DataSet.load_path_as_file_obj( - "default_regions.py" - # "default_regions_xcdat.py" - ) - ) - exec( - compile( - open(default_regions_file.name).read(), - default_regions_file.name, - "exec", - ) - ) - default_regions_file.close() - try: - self.default_regions = locals()["default_regions"] - self.regions_specs = locals()["regions_specs"] - except KeyError: - logging.getLogger("pcmdi_metrics").error( - "Failed to open default_regions.py" - ) - - region_values = self.parameter.regions_values - region_values.update(getattr(self.parameter, "regions_values", {})) - # Now need to edit regions_specs - for region in region_values: - insert_dict = {"value": region_values[region]} - if region in self.regions_specs: - self.regions_specs[region].update(insert_dict) - else: - self.regions_specs[region] = insert_dict - self.regions_specs.update(getattr(self.parameter, "regions_specs", {})) - - def create_region(self, region): - """From the argument region, it gets that region from self.regions_specs - (which itself is loaded from default_regions.py)""" - if isinstance(region, str): - region_name = region - region = self.regions_specs.get( - region_name, self.regions_specs.get(region_name.lower()) - ) - region["id"] = region_name - elif region is None: - # It's okay if region == None - pass - else: - raise Exception("Unknown region: %s" % region) - return region - - def run_reference_and_test_comparison(self): - """Does the (obs or model) vs (obs or model) comparison.""" - reference_data_set = self.parameter.reference_data_set - test_data_set = self.parameter.test_data_set - - print('jwlee-test-0, test_data_set:', test_data_set) - - 
reference_data_set_is_obs = self.is_data_set_obs(reference_data_set) - test_data_set_is_obs = self.is_data_set_obs(test_data_set) - - # If either the reference or test are obs, the data sets - # themselves need to be modified. - if reference_data_set_is_obs: - reference_data_set = Observation.setup_obs_list_from_parameter( - reference_data_set, self.obs_dict, self.var - ) - if test_data_set_is_obs: - test_data_set = Observation.setup_obs_list_from_parameter( - test_data_set, self.obs_dict, self.var - ) - - print('jwlee-test-1, test_data_set:', test_data_set) - print('jwlee-test-1, test_data_set_is_obs:', test_data_set_is_obs) - - if len(reference_data_set) == 0: # We did not find any ref!!! - raise RuntimeError("No reference dataset found!") - - # self.reference/self.test are either an obs or model - for reference in reference_data_set: - try: - ref = self.determine_obs_or_model( - reference_data_set_is_obs, - reference, - self.parameter.reference_data_path, - ) - # TODO Make this a custom exception. This exception is for - # when a model doesn't have sftlf for a given region - except RuntimeError: - continue - - for test in test_data_set: - logging.getLogger("pcmdi_metrics").info("TEST DATA IS: {}".format(test)) - self.output_metric = OutputMetrics( - self.parameter, self.var_name_long, self.obs_dict, sftlf=self.sftlf - ) - self.output_metric.add_region(self.region) - try: - print('jwlee-test-1.5, test_data_set_is_obs, test, self.parameter.test_data_path:', test_data_set_is_obs, test, self.parameter.test_data_path) - tst = self.determine_obs_or_model( - test_data_set_is_obs, test, self.parameter.test_data_path - ) - self.output_metric.obs_or_model = tst.obs_or_model - # TODO Make this a custom exception. This exception is for - # when a model doesn't have sftlf for a given region - except RuntimeError: - continue - except Exception as err: - logging.getLogger("pcmdi_metrics").info( - "Unexpected error: {e}".format(e=err) - ) - break - - try: - print('jwlee-test-2: type(self), ref, tst:', type(self), ref, tst) - print('jwlee-test-2: self.var, self.var_name_long:', self.var, self.var_name_long) - print('jwlee-test-2: tst()[self.var].shape:', tst()[self.var].shape) - self.output_metric.calculate_and_output_metrics(ref, tst) - except RuntimeError: - continue - except Exception as err: - err_msg = ( - "Unexpected error in calculate output metrics: {e}".format( - e=err - ) - ) - logging.getLogger("pcmdi_metrics").info(err_msg) - break - - def is_data_set_obs(self, data_set): - """Is data_set (which is either a test or reference) an obs?""" - if "all" in data_set: - return True - data_set_is_obs = True - # If an element of data_set is not in the obs_dict, then - # data_set is a model. 
- for obs in data_set: - if obs not in self.obs_dict[self.var]: - data_set_is_obs = False - break - return data_set_is_obs - - def determine_obs_or_model(self, is_obs, ref_or_test, data_path): - print('jwlee-test-1.5-1: is_obs, ref_or_test, data_path:', is_obs, ref_or_test, data_path) - """Actually create Observation or Module object - based on if ref_or_test is an obs or model.""" - if is_obs: - logging.getLogger("pcmdi_metrics").info("%s is an obs" % ref_or_test) - return Observation( - self.parameter, - self.var_name_long, - self.region, - ref_or_test, - self.obs_dict, - data_path, - self.sftlf, - ) - else: - logging.getLogger("pcmdi_metrics").info("%s is a model" % ref_or_test) - return Model( - self.parameter, - self.var_name_long, - self.region, - ref_or_test, - self.obs_dict, - data_path, - self.sftlf, - ) - - -def create_mean_climate_parser(): - parser = pmp_parser.PMPMetricsParser() - parser.add_argument( - "--case_id", - dest="case_id", - help="Defines a subdirectory to the metrics output, so multiple" - + "cases can be compared", - required=False, - ) - - parser.add_argument( - "-v", - "--vars", - type=str, - nargs="+", - dest="vars", - help="Variables to use", - required=False, - ) - - parser.add_argument( - "--regions", - type=ast.literal_eval, - dest="regions", - help="Regions on which to run the metrics", - required=False, - ) - - parser.add_argument( - "--regions_values", - type=ast.literal_eval, - dest="regions_values", - help="Users can customize regions values names", - required=False, - ) - - parser.add_argument( - "-r", - "--reference_data_set", - type=str, - nargs="+", - dest="reference_data_set", - help="List of observations or models that are used as a " - + "reference against the test_data_set", - required=False, - ) - - parser.add_argument( - "--reference_data_path", - dest="reference_data_path", - help="Path for the reference climitologies", - required=False, - ) - - parser.add_argument( - "-t", - "--test_data_set", - type=str, - nargs="+", - dest="test_data_set", - help="List of observations or models to test " - + "against the reference_data_set", - required=False, - ) - - parser.add_argument( - "--test_data_path", - dest="test_data_path", - help="Path for the test climitologies", - required=False, - ) - - parser.add_argument( - "--target_grid", - dest="target_grid", - help='Options are "2.5x2.5" or an actual cdms2 grid object', - required=False, - ) - - parser.add_argument( - "--regrid_tool", - dest="regrid_tool", - help='Options are "regrid2" or "esmf"', - required=False, - ) - - parser.add_argument( - "--regrid_method", - dest="regrid_method", - help='Options are "linear" or "conservative", ' - + 'only if regrid_tool is "esmf"', - required=False, - ) - - parser.add_argument( - "--regrid_tool_ocn", - dest="regrid_tool_ocn", - help='Options are "regrid2" or "esmf"', - required=False, - ) - - parser.add_argument( - "--regrid_method_ocn", - dest="regrid_method_ocn", - help='Options are "linear" or "conservative", ' - + 'only if regrid_tool is "esmf"', - required=False, - ) - - parser.add_argument( - "--period", dest="period", help="A simulation parameter", required=False - ) - - parser.add_argument( - "--realization", - dest="realization", - help="A simulation parameter", - required=False, - ) - - parser.add_argument( - "--simulation_description_mapping", - type=ast.literal_eval, - dest="simulation_description_mapping", - help="List of observations or models to test " - + "against the reference_data_set", - default={}, - required=False, - ) - - 
parser.add_argument( - "--ext", dest="ext", help="Extension for the output files?", required=False - ) - - parser.add_argument( - "--dry_run", - # If input is 'True' or 'true', return True. Otherwise False. - type=lambda x: x.lower() == "true", - dest="dry_run", - help="True if output is to be created, False otherwise", - required=False, - ) - - parser.add_argument( - "--filename_template", - dest="filename_template", - help="Template for climatology files", - required=False, - ) - - parser.add_argument( - "--sftlf_filename_template", - dest="sftlf_filename_template", - help='Filename template for landsea masks ("sftlf")', - required=False, - ) - - parser.add_argument( - "--custom_observations", - dest="custom_observations", - help="Path to an alternative, custom observation file", - required=False, - ) - - parser.add_argument( - "--metrics_output_path", - dest="metrics_output_path", - help="Directory of where to put the results", - required=False, - ) - - parser.add_argument( - "--filename_output_template", - dest="filename_output_template", - help="Filename for the interpolated test climatologies", - required=False, - ) - - parser.add_argument( - "--save_test_clims", - # If input is 'True' or 'true', return True. Otherwise False. - type=lambda x: x.lower() == "true", - dest="save_test_clims", - help="True if to save interpolated test climatologies," + " otherwise False", - required=False, - ) - - parser.add_argument( - "--test_clims_interpolated_output", - dest="test_clims_interpolated_output", - help="Directory of where to put the interpolated " + "test climatologies", - required=False, - ) - - parser.add_argument( - "--output_json_template", - help="Filename template for results json files", - required=False, - ) - - parser.add_argument( - "--user_notes", - dest="user_notes", - help="Provide a short description to help identify this run of the PMP mean climate.", - required=False, - ) - - parser.add_argument( - "--cmec", - dest="cmec", - action="store_true", - help="Save metrics in CMEC format", - default=False, - required=False, - ) - - parser.add_argument( - "--no_cmec", - dest="cmec", - action="store_false", - help="Option to not save metrics in CMEC format", - default=False, - required=False, - ) - - return parser diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py index 5429360d5..f0fe61238 100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver.py @@ -1,7 +1,226 @@ #!/usr/bin/env python -from pcmdi_metrics.mean_climate.lib import PMPDriver, create_mean_climate_parser -parser = create_mean_climate_parser() -parameter = parser.get_parameter(cmd_default_vars=False, argparse_vals_only=False) -driver = PMPDriver(parameter) -driver.run_diags() +import json +import os +from re import split + +import cdms2 +import cdutil +import numpy as np +import xcdat + +from pcmdi_metrics import resources +from pcmdi_metrics.io import load_regions_specs, region_subset +from pcmdi_metrics.mean_climate.lib import ( + compute_metrics, + create_mean_climate_parser, + load_and_regrid, + mean_climate_metrics_to_json, +) +from pcmdi_metrics.variability_mode.lib import tree + + +def main(): + parser = create_mean_climate_parser() + parameter = parser.get_parameter(cmd_default_vars=False, argparse_vals_only=False) + + # parameters + case_id = parameter.case_id + test_data_set = parameter.test_data_set + realization = parameter.realization + vars = parameter.vars + reference_data_set = 
parameter.reference_data_set + target_grid = parameter.target_grid + regrid_tool = parameter.regrid_tool + regrid_method = parameter.regrid_method + regrid_tool_ocn = parameter.regrid_tool_ocn + save_test_clims = parameter.save_test_clims + test_clims_interpolated_output = parameter.test_clims_interpolated_output + filename_template = parameter.filename_template + sftlf_filename_template = parameter.sftlf_filename_template + generate_sftlf = parameter.generate_sftlf + regions = parameter.regions + test_data_path = parameter.test_data_path + reference_data_path = parameter.reference_data_path + metrics_output_path = parameter.metrics_output_path.replace('%(case_id)', case_id) + + cmec = False # temporary + + if realization is None: + realization = "" + elif isinstance(realization, str): + realization = [realization] + + debug = True + + print( + 'case_id: ', case_id, '\n', + 'test_data_set:', test_data_set, '\n', + 'realization:', realization, '\n', + 'vars:', vars, '\n', + 'reference_data_set:', reference_data_set, '\n', + 'target_grid:', target_grid, '\n', + 'regrid_tool:', regrid_tool, '\n', + 'regrid_method:', regrid_method, '\n', + 'regrid_tool_ocn:', regrid_tool_ocn, '\n', + 'save_test_clims:', save_test_clims, '\n', + 'test_clims_interpolated_output:', test_clims_interpolated_output, '\n', + 'filename_template:', filename_template, '\n', + 'sftlf_filename_template:', sftlf_filename_template, '\n', + 'generate_sftlf:', generate_sftlf, '\n', + 'regions:', regions, '\n', + 'test_data_path:', test_data_path, '\n', + 'reference_data_path:', reference_data_path, '\n', + 'metrics_output_path:', metrics_output_path, '\n') + + print('--- prepare mean climate metrics calculation ---') + + regions_specs = load_regions_specs() + default_regions = ['global', 'NHEX', 'SHEX', 'TROPICS'] + + # generate target grid + if target_grid == "2.5x2.5": + # target grid for regridding + t_grid = xcdat.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) + if debug: + print('type(t_grid):', type(t_grid)) # Expected type is 'xarray.core.dataset.Dataset' + print('t_grid:', t_grid) + # identical target grid in cdms2 to use generateLandSeaMask function that is yet to exist in xcdat + t_grid_cdms2 = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) + # generate land sea mask for the target grid + sft = cdutil.generateLandSeaMask(t_grid_cdms2) + if debug: + print('sft:', sft) + print('sft.getAxisList():', sft.getAxisList()) + # add sft to target grid dataset + t_grid['sftlf'] = (['lat', 'lon'], np.array(sft)) + if debug: + print('t_grid (after sftlf added):', t_grid) + t_grid.to_netcdf('target_grid.nc') + + # load obs catalogue json + egg_pth = resources.resource_path() + obs_file_name = "obs_info_dictionary.json" + obs_file_path = os.path.join(egg_pth, obs_file_name) + with open(obs_file_path) as fo: + obs_dict = json.loads(fo.read()) + # if debug: + # print('obs_dict:', json.dumps(obs_dict, indent=4, sort_keys=True)) + + # set dictionary for .json record + result_dict = tree() + + print('--- start mean climate metrics calculation ---') + + # ------------- + # variable loop + # ------------- + for var in vars: + + if '_' in var or '-' in var: + varname = split('_|-', var)[0] + level = float(split('_|-', var)[1]) + else: + varname = var + level = None + + if varname not in list(regions.keys()): + regions[varname] = default_regions + + print('varname:', varname) + print('level:', level) + + # ---------------- + # observation loop + # ---------------- + for ref in reference_data_set: + print('ref:', ref) + # 
identify data to load (annual cycle (AC) data is loading in) + ref_dataset_name = obs_dict[varname][ref] + ref_data_full_path = os.path.join( + reference_data_path, + obs_dict[varname][ref_dataset_name]["template"]) + print('ref_data_full_path:', ref_data_full_path) + # load data and regrid + ds_ref = load_and_regrid(ref_data_full_path, varname, level, t_grid, decode_times=False, regrid_tool=regrid_tool, debug=debug) + ds_ref_dict = dict() + + # ---------- + # model loop + # ---------- + for model in test_data_set: + print('model:', model) + for run in realization: + ds_test_dict = dict() + # identify data to load (annual cycle (AC) data is loading in) + test_data_full_path = os.path.join( + test_data_path, + filename_template.replace('%(variable)', varname).replace('%(model)', model).replace('%(realization)', run)) + # load data and regrid + ds_test = load_and_regrid(test_data_full_path, varname, level, t_grid, regrid_tool=regrid_tool, debug=debug) + + # ----------- + # region loop + # ----------- + for region in regions[varname]: + print('region:', region) + + # land/sea mask -- conduct masking only for variable data array, not entire data + if region.split('_')[0] in ['land', 'ocean']: + surface_type = region.split('_')[0] + ds_test_tmp = ds_test.copy(deep=True) + ds_ref_tmp = ds_ref.copy(deep=True) + if surface_type == 'land': + ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] != 0.) + ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] != 0.) + elif surface_type == 'ocean': + ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] == 0.) + ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] == 0.) + else: + ds_test_tmp = ds_test + ds_ref_tmp = ds_ref + print('mask done') + + # spatial subset + if region.lower() in ['global', 'land', 'ocean']: + ds_test_dict[region] = ds_test_tmp + if region not in list(ds_ref_dict.keys()): + ds_ref_dict[region] = ds_ref_tmp + else: + ds_test_tmp = region_subset(ds_test_tmp, regions_specs, region=region) + ds_test_dict[region] = ds_test_tmp + if region not in list(ds_ref_dict.keys()): + ds_ref_dict[region] = region_subset(ds_ref_tmp, regions_specs, region=region) + print('spatial subset done') + + if debug: + print('ds_test_tmp:', ds_test_tmp) + ds_test_tmp.to_netcdf('_'.join([var, 'model', region + '.nc'])) + + # compute metrics + print('compute metrics start') + result_dict["RESULTS"][model][ref][run][region] = compute_metrics(varname, ds_test_dict[region], ds_ref_dict[region]) + + # write individual JSON for single model (multi realizations if exist) / single obs (need to accumulate later) / single variable + json_filename_tmp = "_".join([model, var, target_grid, regrid_tool, regrid_method, "metrics"]) + mean_climate_metrics_to_json( + os.path.join(metrics_output_path, var), + json_filename_tmp, + result_dict, + model=model, + run=run, + cmec_flag=cmec, + ) + + # write collective JSON for all models / all obs / single variable + json_filename = "_".join([var, target_grid, regrid_tool, regrid_method, "metrics"]) + mean_climate_metrics_to_json( + metrics_output_path, + json_filename, + result_dict, + cmec_flag=cmec, + ) + + +if __name__ == "__main__": + main() diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver_new.py b/pcmdi_metrics/mean_climate/mean_climate_driver_new.py deleted file mode 100755 index 5897b4051..000000000 --- a/pcmdi_metrics/mean_climate/mean_climate_driver_new.py +++ /dev/null @@ -1,290 +0,0 @@ -#!/usr/bin/env python - -import json -import os -from copy import deepcopy -from re import 
split - -import cdms2 -import cdutil -import numpy as np -import xcdat - -from pcmdi_metrics import resources -from pcmdi_metrics.io import load_regions_specs, region_subset, xcdat_open -from pcmdi_metrics.io.base import Base -from pcmdi_metrics.mean_climate.lib import compute_metrics, create_mean_climate_parser -from pcmdi_metrics.variability_mode.lib import tree - - -def main(): - parser = create_mean_climate_parser() - parameter = parser.get_parameter(cmd_default_vars=False, argparse_vals_only=False) - - # parameters - case_id = parameter.case_id - test_data_set = parameter.test_data_set - realization = parameter.realization - vars = parameter.vars - reference_data_set = parameter.reference_data_set - target_grid = parameter.target_grid - regrid_tool = parameter.regrid_tool - regrid_method = parameter.regrid_method - regrid_tool_ocn = parameter.regrid_tool_ocn - save_test_clims = parameter.save_test_clims - test_clims_interpolated_output = parameter.test_clims_interpolated_output - filename_template = parameter.filename_template - sftlf_filename_template = parameter.sftlf_filename_template - generate_sftlf = parameter.generate_sftlf - regions = parameter.regions - test_data_path = parameter.test_data_path - reference_data_path = parameter.reference_data_path - metrics_output_path = parameter.metrics_output_path.replace('%(case_id)', case_id) - - cmec = False # temporary - - if realization is None: - realization = "" - elif isinstance(realization, str): - realization = [realization] - - debug = True - - print( - 'case_id: ', case_id, '\n', - 'test_data_set:', test_data_set, '\n', - 'realization:', realization, '\n', - 'vars:', vars, '\n', - 'reference_data_set:', reference_data_set, '\n', - 'target_grid:', target_grid, '\n', - 'regrid_tool:', regrid_tool, '\n', - 'regrid_method:', regrid_method, '\n', - 'regrid_tool_ocn:', regrid_tool_ocn, '\n', - 'save_test_clims:', save_test_clims, '\n', - 'test_clims_interpolated_output:', test_clims_interpolated_output, '\n', - 'filename_template:', filename_template, '\n', - 'sftlf_filename_template:', sftlf_filename_template, '\n', - 'generate_sftlf:', generate_sftlf, '\n', - 'regions:', regions, '\n', - 'test_data_path:', test_data_path, '\n', - 'reference_data_path:', reference_data_path, '\n', - 'metrics_output_path:', metrics_output_path, '\n') - - print('--- prepare mean climate metrics calculation ---') - - regions_specs = load_regions_specs() - default_regions = ['global', 'NHEX', 'SHEX', 'TROPICS'] - - # generate target grid - if target_grid == "2.5x2.5": - # target grid for regridding - t_grid = xcdat.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) - if debug: - print('type(t_grid):', type(t_grid)) # Expected type is 'xarray.core.dataset.Dataset' - print('t_grid:', t_grid) - # identical target grid in cdms2 to use generateLandSeaMask function that is yet to exist in xcdat - t_grid_cdms2 = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) - # generate land sea mask for the target grid - sft = cdutil.generateLandSeaMask(t_grid_cdms2) - if debug: - print('sft:', sft) - print('sft.getAxisList():', sft.getAxisList()) - # add sft to target grid dataset - t_grid['sftlf'] = (['lat', 'lon'], np.array(sft)) - if debug: - print('t_grid (after sftlf added):', t_grid) - t_grid.to_netcdf('target_grid.nc') - - # load obs catalogue json - egg_pth = resources.resource_path() - obs_file_name = "obs_info_dictionary.json" - obs_file_path = os.path.join(egg_pth, obs_file_name) - with open(obs_file_path) as fo: - obs_dict = json.loads(fo.read()) - 
# if debug: - # print('obs_dict:', json.dumps(obs_dict, indent=4, sort_keys=True)) - - # set dictionary for .json record - result_dict = tree() - - print('--- start mean climate metrics calculation ---') - - # ------------- - # variable loop - # ------------- - for var in vars: - - if '_' in var or '-' in var: - varname = split('_|-', var)[0] - level = float(split('_|-', var)[1]) - else: - varname = var - level = None - - if varname not in list(regions.keys()): - regions[varname] = default_regions - - print('varname:', varname) - print('level:', level) - - # ---------------- - # observation loop - # ---------------- - for ref in reference_data_set: - print('ref:', ref) - # identify data to load (annual cycle (AC) data is loading in) - ref_dataset_name = obs_dict[varname][ref] - ref_data_full_path = os.path.join( - reference_data_path, - obs_dict[varname][ref_dataset_name]["template"]) - print('ref_data_full_path:', ref_data_full_path) - # load data and regrid - ds_ref = load_and_regrid(ref_data_full_path, varname, level, t_grid, decode_times=False, regrid_tool=regrid_tool, debug=debug) - ds_ref_dict = dict() - - # ---------- - # model loop - # ---------- - for model in test_data_set: - print('model:', model) - for run in realization: - ds_test_dict = dict() - # identify data to load (annual cycle (AC) data is loading in) - test_data_full_path = os.path.join( - test_data_path, - filename_template.replace('%(variable)', varname).replace('%(model)', model).replace('%(realization)', run)) - # load data and regrid - ds_test = load_and_regrid(test_data_full_path, varname, level, t_grid, regrid_tool=regrid_tool, debug=debug) - - # ----------- - # region loop - # ----------- - for region in regions[varname]: - print('region:', region) - - # land/sea mask -- conduct masking only for variable data array, not entire data - if region.split('_')[0] in ['land', 'ocean']: - surface_type = region.split('_')[0] - ds_test_tmp = ds_test.copy(deep=True) - ds_ref_tmp = ds_ref.copy(deep=True) - if surface_type == 'land': - ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] != 0.) - ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] != 0.) - elif surface_type == 'ocean': - ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] == 0.) - ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] == 0.) 
- else: - ds_test_tmp = ds_test - ds_ref_tmp = ds_ref - print('mask done') - - # spatial subset - if region.lower() in ['global', 'land', 'ocean']: - ds_test_dict[region] = ds_test_tmp - if region not in list(ds_ref_dict.keys()): - ds_ref_dict[region] = ds_ref_tmp - else: - ds_test_tmp = region_subset(ds_test_tmp, regions_specs, region=region) - ds_test_dict[region] = ds_test_tmp - if region not in list(ds_ref_dict.keys()): - ds_ref_dict[region] = region_subset(ds_ref_tmp, regions_specs, region=region) - print('spatial subset done') - - if debug: - print('ds_test_tmp:', ds_test_tmp) - ds_test_tmp.to_netcdf('_'.join([var, 'model', region + '.nc'])) - - # compute metrics - print('compute metrics start') - result_dict["RESULTS"][model][ref][run][region] = compute_metrics(varname, ds_test_dict[region], ds_ref_dict[region]) - - # write individual JSON for single model (multi realizations if exist) / single obs (need to accumulate later) / single variable - json_filename_tmp = "_".join([model, var, target_grid, regrid_tool, regrid_method, "metrics"]) - mean_climate_metrics_to_json( - os.path.join(metrics_output_path, var), - json_filename_tmp, - result_dict, - model=model, - run=run, - cmec_flag=cmec, - ) - - # write collective JSON for all models / all obs / single variable - json_filename = "_".join([var, target_grid, regrid_tool, regrid_method, "metrics"]) - mean_climate_metrics_to_json( - metrics_output_path, - json_filename, - result_dict, - cmec_flag=cmec, - ) - - -def load_and_regrid(data_path, varname, level=None, t_grid=None, decode_times=True, regrid_tool='regrid2', debug=False): - """Load data and regrid to target grid - - Args: - data_path (str): full data path for nc or xml file - varname (str): variable name - level (float): level to extract (unit in hPa) - t_grid (xarray.core.dataset.Dataset): target grid to regrid - decode_times (bool): Default is True. decode_times=False will be removed once obs4MIP written using xcdat - regrid_tool (str): Name of the regridding tool. See https://xcdat.readthedocs.io/en/stable/generated/xarray.Dataset.regridder.horizontal.html for more info - debug (bool): Default is False. 
If True, print more info to help debugging process - """ - # load data - ds = xcdat_open(data_path, data_var=varname, decode_times=decode_times) # NOTE: decode_times=False will be removed once obs4MIP written using xcdat - if level is not None: - level = level * 100 # hPa to Pa - ds = ds.sel(plev=level) - if debug: - print('ds:', ds) - # regrid - ds_regridded = ds.regridder.horizontal(varname, t_grid, tool=regrid_tool) - if debug: - print('ds_regridded:', ds_regridded) - return ds_regridded - - -def mean_climate_metrics_to_json( - outdir, json_filename, result_dict, model=None, run=None, cmec_flag=False -): - # Open JSON - JSON = Base( - outdir, json_filename - ) - # Dict for JSON - json_dict = deepcopy(result_dict) - if model is not None or run is not None: - # Preserve only needed dict branch -- delete rest keys - models_in_dict = list(json_dict["RESULTS"].keys()) - for m in models_in_dict: - if m == model: - runs_in_model_dict = list(json_dict["RESULTS"][m].keys()) - for r in runs_in_model_dict: - if r != run and run is not None: - del json_dict["RESULTS"][m][r] - else: - del json_dict["RESULTS"][m] - # Write selected dict to JSON - JSON.write( - json_dict, - json_structure=[ - "model", - "reference", - "rip", - "region", - "statistic", - "season", - ], - indent=4, - separators=(",", ": "), - mode="r+", - sort_keys=True, - ) - if cmec_flag: - print("Writing cmec file") - JSON.write_cmec(indent=4, separators=(",", ": ")) - - -if __name__ == "__main__": - main() diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 1bde6ee0c..7c3da6851 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-73-g400f54f' -__git_sha1__ = '400f54f135b248bd9f097b1b3ae8296393fe4ff7' +__git_tag_describe__ = 'v2.3.1-75-ge8b8a29' +__git_sha1__ = 'e8b8a29293daacacec78ac1baee2f4c2f4cd3442' diff --git a/setup.py b/setup.py index 739bcc84b..5d53b57a1 100755 --- a/setup.py +++ b/setup.py @@ -45,7 +45,6 @@ scripts = [ "pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py", "pcmdi_metrics/mean_climate/mean_climate_driver.py", - "pcmdi_metrics/mean_climate/mean_climate_driver_new.py", "pcmdi_metrics/monsoon_wang/scripts/mpindex_compute.py", "pcmdi_metrics/monsoon_sperber/scripts/driver_monsoon_sperber.py", "pcmdi_metrics/mjo/mjo_metrics_driver.py", From 64698f8f4b94aca6ba7f17ee744e8c48c26bdbbf Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 8 Dec 2022 17:32:46 -0800 Subject: [PATCH 065/130] some re-org --- .../mean_climate/deprecated/lib/__init__.py | 25 + .../lib/mean_climate_metrics_driver.py | 498 ++++++++++++++++++ .../mean_climate/lib/load_and_regrid.py | 27 + .../lib/mean_climate_metrics_to_json.py | 44 ++ 4 files changed, 594 insertions(+) create mode 100644 pcmdi_metrics/mean_climate/deprecated/lib/__init__.py create mode 100644 pcmdi_metrics/mean_climate/deprecated/lib/mean_climate_metrics_driver.py create mode 100644 pcmdi_metrics/mean_climate/lib/load_and_regrid.py create mode 100644 pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py diff --git a/pcmdi_metrics/mean_climate/deprecated/lib/__init__.py b/pcmdi_metrics/mean_climate/deprecated/lib/__init__.py new file mode 100644 index 000000000..5d395052c --- /dev/null +++ b/pcmdi_metrics/mean_climate/deprecated/lib/__init__.py @@ -0,0 +1,25 @@ +from .compute_statistics import ( # noqa + annual_mean, + bias_xy, + cor_xy, + mean_xy, + meanabs_xy, + rms_0, + rms_xy, + rms_xyt, + rmsc_xy, + seasonal_mean, + std_xy, + std_xyt, + zonal_mean, +) +from 
.mean_climate_metrics_calculations import compute_metrics # noqa +from .mean_climate_metrics_driver import PMPDriver, create_mean_climate_parser # noqa + +from . import dataset # DataSet # noqa # isort:skip +from . import io # noqa # isort:skip +from . import model # Model # noqa # isort:skip +from . import observation # OBS, Observation # noqa # isort:skip +from . import outputmetrics # OutputMetrics # noqa # isort:skip +from . import pmp_parameter # PMPParameter, PMPMetricsParameter # noqa # isort:skip +from . import pmp_parser # PMPParser, PMPMetricsParser # noqa # isort:skip diff --git a/pcmdi_metrics/mean_climate/deprecated/lib/mean_climate_metrics_driver.py b/pcmdi_metrics/mean_climate/deprecated/lib/mean_climate_metrics_driver.py new file mode 100644 index 000000000..f0e247fc3 --- /dev/null +++ b/pcmdi_metrics/mean_climate/deprecated/lib/mean_climate_metrics_driver.py @@ -0,0 +1,498 @@ +#!/usr/bin/env python +import ast +import json +import logging + +from pcmdi_metrics import LOG_LEVEL +from pcmdi_metrics.mean_climate.lib import pmp_parser +from pcmdi_metrics.mean_climate.lib.dataset import DataSet +from pcmdi_metrics.mean_climate.lib.model import Model +from pcmdi_metrics.mean_climate.lib.observation import Observation +from pcmdi_metrics.mean_climate.lib.outputmetrics import OutputMetrics + + +class PMPDriver(object): + def __init__(self, parameter): + plog = logging.getLogger("pcmdi_metrics") + plog.setLevel(LOG_LEVEL) + # create file handler which logs messages + formatter = logging.Formatter( + "%%(levelname)s::%%(asctime)s::%%(name)s::%s:: %%(message)s" + % (parameter.case_id), + datefmt="%Y-%m-%d %H:%M", + ) + for h in plog.handlers: + h.setFormatter(formatter) + + fh = logging.FileHandler("pcmdi_metrics_driver.%s.log" % (parameter.case_id)) + fh.setLevel(LOG_LEVEL) + formatter = logging.Formatter( + "%(levelname)s::%(asctime)s:: %(message)s", datefmt="%Y-%m-%d %H:%M" + ) + fh.setFormatter(formatter) + plog.addHandler(fh) + self.parameter = parameter + self.obs_dict = {} + self.regions_dict = {} + self.var = "" + self.output_metric = None + self.region = "" + self.sftlf = DataSet.create_sftlf(self.parameter) + self.default_regions = [] + self.regions_specs = {} + + def __call__(self): + self.run_diags() + + def run_diags(self): + """Runs the diagnostics. 
What did you think it did?""" + self.obs_dict = self.load_obs_dict() + self.regions_dict = self.create_regions_dict() + + for self.var_name_long in self.parameter.vars: + self.var = self.var_name_long.split("_")[0] + + if self.var not in self.obs_dict: + logging.getLogger("pcmdi_metrics").error( + "Variable %s not in obs_dict" % self.var + ) + continue + + for region in self.regions_dict[self.var]: + logging.getLogger("pcmdi_metrics").info("REGION: {}".format(region)) + self.region = self.create_region(region) + self.run_reference_and_test_comparison() + + def load_obs_dict(self): + """Loads obs_info_dictionary.json and appends + custom_observations from the parameter file if needed.""" + obs_file_name = "obs_info_dictionary.json" + obs_json_file = DataSet.load_path_as_file_obj( + obs_file_name + ) + obs_dict = json.loads(obs_json_file.read()) + obs_json_file.close() + + if hasattr(self.parameter, "custom_observations"): + # Can't use load_path_as_file_obj() b/c might not be in /share/ + cust_obs_json_file = open(self.parameter.custom_observations) + obs_dict.update(json.load(cust_obs_json_file)) + cust_obs_json_file.close() + return obs_dict + + def create_regions_dict(self): + """Creates a dict from self.default_regions.""" + self.load_default_regions_and_regions_specs() + + regions_dict = {} + for var_name_long in self.parameter.vars: + var = var_name_long.split("_")[0] + regions = self.parameter.regions + region = regions.get(var, self.default_regions) + if not isinstance(region, (list, tuple)): + region = [region] + if None in region: + region.remove(None) + for r in self.default_regions: + region.insert(0, r) + regions_dict[var] = region + + return regions_dict + + def load_default_regions_and_regions_specs(self): + """Gets the default_regions dict and regions_specs dict + from default_regions.py and stores them as attributes.""" + default_regions_file = ( + DataSet.load_path_as_file_obj( + "default_regions.py" + # "default_regions_xcdat.py" + ) + ) + exec( + compile( + open(default_regions_file.name).read(), + default_regions_file.name, + "exec", + ) + ) + default_regions_file.close() + try: + self.default_regions = locals()["default_regions"] + self.regions_specs = locals()["regions_specs"] + except KeyError: + logging.getLogger("pcmdi_metrics").error( + "Failed to open default_regions.py" + ) + + region_values = self.parameter.regions_values + region_values.update(getattr(self.parameter, "regions_values", {})) + # Now need to edit regions_specs + for region in region_values: + insert_dict = {"value": region_values[region]} + if region in self.regions_specs: + self.regions_specs[region].update(insert_dict) + else: + self.regions_specs[region] = insert_dict + self.regions_specs.update(getattr(self.parameter, "regions_specs", {})) + + def create_region(self, region): + """From the argument region, it gets that region from self.regions_specs + (which itself is loaded from default_regions.py)""" + if isinstance(region, str): + region_name = region + region = self.regions_specs.get( + region_name, self.regions_specs.get(region_name.lower()) + ) + region["id"] = region_name + elif region is None: + # It's okay if region == None + pass + else: + raise Exception("Unknown region: %s" % region) + return region + + def run_reference_and_test_comparison(self): + """Does the (obs or model) vs (obs or model) comparison.""" + reference_data_set = self.parameter.reference_data_set + test_data_set = self.parameter.test_data_set + + print('jwlee-test-0, test_data_set:', test_data_set) + + 
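
# The observation catalogue handling above is a straight dict.update(): entries from a
# user-supplied custom_observations JSON override the packaged obs_info_dictionary.json
# per variable.  A tiny illustration with invented entries (not the real catalogue schema):
import json

base_catalogue = {"pr": {"default": "obs-product-A"}, "ts": {"default": "obs-product-B"}}
custom = json.loads('{"pr": {"default": "obs-product-C"}}')   # stands in for the custom file
base_catalogue.update(custom)
print(base_catalogue["pr"]["default"])                        # -> obs-product-C
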
reference_data_set_is_obs = self.is_data_set_obs(reference_data_set) + test_data_set_is_obs = self.is_data_set_obs(test_data_set) + + # If either the reference or test are obs, the data sets + # themselves need to be modified. + if reference_data_set_is_obs: + reference_data_set = Observation.setup_obs_list_from_parameter( + reference_data_set, self.obs_dict, self.var + ) + if test_data_set_is_obs: + test_data_set = Observation.setup_obs_list_from_parameter( + test_data_set, self.obs_dict, self.var + ) + + print('jwlee-test-1, test_data_set:', test_data_set) + print('jwlee-test-1, test_data_set_is_obs:', test_data_set_is_obs) + + if len(reference_data_set) == 0: # We did not find any ref!!! + raise RuntimeError("No reference dataset found!") + + # self.reference/self.test are either an obs or model + for reference in reference_data_set: + try: + ref = self.determine_obs_or_model( + reference_data_set_is_obs, + reference, + self.parameter.reference_data_path, + ) + # TODO Make this a custom exception. This exception is for + # when a model doesn't have sftlf for a given region + except RuntimeError: + continue + + for test in test_data_set: + logging.getLogger("pcmdi_metrics").info("TEST DATA IS: {}".format(test)) + self.output_metric = OutputMetrics( + self.parameter, self.var_name_long, self.obs_dict, sftlf=self.sftlf + ) + self.output_metric.add_region(self.region) + try: + print('jwlee-test-1.5, test_data_set_is_obs, test, self.parameter.test_data_path:', test_data_set_is_obs, test, self.parameter.test_data_path) + tst = self.determine_obs_or_model( + test_data_set_is_obs, test, self.parameter.test_data_path + ) + self.output_metric.obs_or_model = tst.obs_or_model + # TODO Make this a custom exception. This exception is for + # when a model doesn't have sftlf for a given region + except RuntimeError: + continue + except Exception as err: + logging.getLogger("pcmdi_metrics").info( + "Unexpected error: {e}".format(e=err) + ) + break + + try: + print('jwlee-test-2: type(self), ref, tst:', type(self), ref, tst) + print('jwlee-test-2: self.var, self.var_name_long:', self.var, self.var_name_long) + print('jwlee-test-2: tst()[self.var].shape:', tst()[self.var].shape) + self.output_metric.calculate_and_output_metrics(ref, tst) + except RuntimeError: + continue + except Exception as err: + err_msg = ( + "Unexpected error in calculate output metrics: {e}".format( + e=err + ) + ) + logging.getLogger("pcmdi_metrics").info(err_msg) + break + + def is_data_set_obs(self, data_set): + """Is data_set (which is either a test or reference) an obs?""" + if "all" in data_set: + return True + data_set_is_obs = True + # If an element of data_set is not in the obs_dict, then + # data_set is a model. 
+ for obs in data_set: + if obs not in self.obs_dict[self.var]: + data_set_is_obs = False + break + return data_set_is_obs + + def determine_obs_or_model(self, is_obs, ref_or_test, data_path): + print('jwlee-test-1.5-1: is_obs, ref_or_test, data_path:', is_obs, ref_or_test, data_path) + """Actually create Observation or Module object + based on if ref_or_test is an obs or model.""" + if is_obs: + logging.getLogger("pcmdi_metrics").info("%s is an obs" % ref_or_test) + return Observation( + self.parameter, + self.var_name_long, + self.region, + ref_or_test, + self.obs_dict, + data_path, + self.sftlf, + ) + else: + logging.getLogger("pcmdi_metrics").info("%s is a model" % ref_or_test) + return Model( + self.parameter, + self.var_name_long, + self.region, + ref_or_test, + self.obs_dict, + data_path, + self.sftlf, + ) + + +def create_mean_climate_parser(): + parser = pmp_parser.PMPMetricsParser() + parser.add_argument( + "--case_id", + dest="case_id", + help="Defines a subdirectory to the metrics output, so multiple" + + "cases can be compared", + required=False, + ) + + parser.add_argument( + "-v", + "--vars", + type=str, + nargs="+", + dest="vars", + help="Variables to use", + required=False, + ) + + parser.add_argument( + "--regions", + type=ast.literal_eval, + dest="regions", + help="Regions on which to run the metrics", + required=False, + ) + + parser.add_argument( + "--regions_values", + type=ast.literal_eval, + dest="regions_values", + help="Users can customize regions values names", + required=False, + ) + + parser.add_argument( + "-r", + "--reference_data_set", + type=str, + nargs="+", + dest="reference_data_set", + help="List of observations or models that are used as a " + + "reference against the test_data_set", + required=False, + ) + + parser.add_argument( + "--reference_data_path", + dest="reference_data_path", + help="Path for the reference climitologies", + required=False, + ) + + parser.add_argument( + "-t", + "--test_data_set", + type=str, + nargs="+", + dest="test_data_set", + help="List of observations or models to test " + + "against the reference_data_set", + required=False, + ) + + parser.add_argument( + "--test_data_path", + dest="test_data_path", + help="Path for the test climitologies", + required=False, + ) + + parser.add_argument( + "--target_grid", + dest="target_grid", + help='Options are "2.5x2.5" or an actual cdms2 grid object', + required=False, + ) + + parser.add_argument( + "--regrid_tool", + dest="regrid_tool", + help='Options are "regrid2" or "esmf"', + required=False, + ) + + parser.add_argument( + "--regrid_method", + dest="regrid_method", + help='Options are "linear" or "conservative", ' + + 'only if regrid_tool is "esmf"', + required=False, + ) + + parser.add_argument( + "--regrid_tool_ocn", + dest="regrid_tool_ocn", + help='Options are "regrid2" or "esmf"', + required=False, + ) + + parser.add_argument( + "--regrid_method_ocn", + dest="regrid_method_ocn", + help='Options are "linear" or "conservative", ' + + 'only if regrid_tool is "esmf"', + required=False, + ) + + parser.add_argument( + "--period", dest="period", help="A simulation parameter", required=False + ) + + parser.add_argument( + "--realization", + dest="realization", + help="A simulation parameter", + required=False, + ) + + parser.add_argument( + "--simulation_description_mapping", + type=ast.literal_eval, + dest="simulation_description_mapping", + help="List of observations or models to test " + + "against the reference_data_set", + default={}, + required=False, + ) + + 
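
# Options such as --regions, --regions_values and --simulation_description_mapping above
# use type=ast.literal_eval, so dict- or list-valued settings can be passed directly on
# the command line.  A sketch of what that conversion does (the value is an example only):
import ast

regions_cli_value = '{"pr": ["global", "land_CONUS"], "ua": ["global"]}'
regions = ast.literal_eval(regions_cli_value)
assert regions["pr"] == ["global", "land_CONUS"]
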
parser.add_argument( + "--ext", dest="ext", help="Extension for the output files?", required=False + ) + + parser.add_argument( + "--dry_run", + # If input is 'True' or 'true', return True. Otherwise False. + type=lambda x: x.lower() == "true", + dest="dry_run", + help="True if output is to be created, False otherwise", + required=False, + ) + + parser.add_argument( + "--filename_template", + dest="filename_template", + help="Template for climatology files", + required=False, + ) + + parser.add_argument( + "--sftlf_filename_template", + dest="sftlf_filename_template", + help='Filename template for landsea masks ("sftlf")', + required=False, + ) + + parser.add_argument( + "--custom_observations", + dest="custom_observations", + help="Path to an alternative, custom observation file", + required=False, + ) + + parser.add_argument( + "--metrics_output_path", + dest="metrics_output_path", + help="Directory of where to put the results", + required=False, + ) + + parser.add_argument( + "--filename_output_template", + dest="filename_output_template", + help="Filename for the interpolated test climatologies", + required=False, + ) + + parser.add_argument( + "--save_test_clims", + # If input is 'True' or 'true', return True. Otherwise False. + type=lambda x: x.lower() == "true", + dest="save_test_clims", + help="True if to save interpolated test climatologies," + " otherwise False", + required=False, + ) + + parser.add_argument( + "--test_clims_interpolated_output", + dest="test_clims_interpolated_output", + help="Directory of where to put the interpolated " + "test climatologies", + required=False, + ) + + parser.add_argument( + "--output_json_template", + help="Filename template for results json files", + required=False, + ) + + parser.add_argument( + "--user_notes", + dest="user_notes", + help="Provide a short description to help identify this run of the PMP mean climate.", + required=False, + ) + + parser.add_argument( + "--cmec", + dest="cmec", + action="store_true", + help="Save metrics in CMEC format", + default=False, + required=False, + ) + + parser.add_argument( + "--no_cmec", + dest="cmec", + action="store_false", + help="Option to not save metrics in CMEC format", + default=False, + required=False, + ) + + return parser diff --git a/pcmdi_metrics/mean_climate/lib/load_and_regrid.py b/pcmdi_metrics/mean_climate/lib/load_and_regrid.py new file mode 100644 index 000000000..4aa97b894 --- /dev/null +++ b/pcmdi_metrics/mean_climate/lib/load_and_regrid.py @@ -0,0 +1,27 @@ +from pcmdi_metrics.io import xcdat_open + + +def load_and_regrid(data_path, varname, level=None, t_grid=None, decode_times=True, regrid_tool='regrid2', debug=False): + """Load data and regrid to target grid + + Args: + data_path (str): full data path for nc or xml file + varname (str): variable name + level (float): level to extract (unit in hPa) + t_grid (xarray.core.dataset.Dataset): target grid to regrid + decode_times (bool): Default is True. decode_times=False will be removed once obs4MIP written using xcdat + regrid_tool (str): Name of the regridding tool. See https://xcdat.readthedocs.io/en/stable/generated/xarray.Dataset.regridder.horizontal.html for more info + debug (bool): Default is False. 
If True, print more info to help debugging process + """ + # load data + ds = xcdat_open(data_path, data_var=varname, decode_times=decode_times) # NOTE: decode_times=False will be removed once obs4MIP written using xcdat + if level is not None: + level = level * 100 # hPa to Pa + ds = ds.sel(plev=level) + if debug: + print('ds:', ds) + # regrid + ds_regridded = ds.regridder.horizontal(varname, t_grid, tool=regrid_tool) + if debug: + print('ds_regridded:', ds_regridded) + return ds_regridded diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py new file mode 100644 index 000000000..34fe4bf93 --- /dev/null +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py @@ -0,0 +1,44 @@ +from copy import deepcopy + +from pcmdi_metrics.io.base import Base + + +def mean_climate_metrics_to_json( + outdir, json_filename, result_dict, model=None, run=None, cmec_flag=False +): + # Open JSON + JSON = Base( + outdir, json_filename + ) + # Dict for JSON + json_dict = deepcopy(result_dict) + if model is not None or run is not None: + # Preserve only needed dict branch -- delete rest keys + models_in_dict = list(json_dict["RESULTS"].keys()) + for m in models_in_dict: + if m == model: + runs_in_model_dict = list(json_dict["RESULTS"][m].keys()) + for r in runs_in_model_dict: + if r != run and run is not None: + del json_dict["RESULTS"][m][r] + else: + del json_dict["RESULTS"][m] + # Write selected dict to JSON + JSON.write( + json_dict, + json_structure=[ + "model", + "reference", + "rip", + "region", + "statistic", + "season", + ], + indent=4, + separators=(",", ": "), + mode="r+", + sort_keys=True, + ) + if cmec_flag: + print("Writing cmec file") + JSON.write_cmec(indent=4, separators=(",", ": ")) From ce13b15888790a44126af2a4b15fd2823fe25f3c Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 8 Dec 2022 17:49:14 -0800 Subject: [PATCH 066/130] clean up --- pcmdi_metrics/io/base.py | 30 +----------------------------- 1 file changed, 1 insertion(+), 29 deletions(-) diff --git a/pcmdi_metrics/io/base.py b/pcmdi_metrics/io/base.py index 177572197..b06f8c49d 100755 --- a/pcmdi_metrics/io/base.py +++ b/pcmdi_metrics/io/base.py @@ -349,7 +349,6 @@ def get_dimensions(json_dict, json_structure): ) def get(self, var, var_in_file=None, region={}, *args, **kwargs): - print('jwlee-test-get, var, var_in_file:', var, var_in_file) self.variable = var self.var_from_file = self.extract_var_from_file( var, var_in_file, *args, **kwargs @@ -379,7 +378,6 @@ def extract_var_from_file(self, var, var_in_file, *args, **kwargs): ds = xcdat_open(self(), data_var=var_in_file, decode_times=False) # Temporary part to read in cdms written obs4MIP AC files if 'level' in list(kwargs.keys()): - print("jwlee-test extract_var_from_file kwargs['level']:", kwargs['level']) level = kwargs['level'] ds = ds.sel(plev=level) @@ -398,13 +396,6 @@ def mask_var(self, var): self: var: """ - print('jwlee-test-mask_var, self, var:', self, var) - print('jwlee-test-mask_var, type(self)', type(self)) - print('jwlee-test-mask_var, type(var)', type(var)) - print('jwlee-test-mask_var, self.mask', self.mask) - print('jwlee-test-mask_var, type(self.mask)', type(self.mask)) # cdms2.tvariable.TransientVariable - print('jwlee-test-mask_var, self.mask.shape', self.mask.shape) - print("jwlee-test-mask_var, tuple(var.dims[d] for d in ['lat', 'lon']):", tuple(var.dims[d] for d in ['lat', 'lon'])) var_shape = tuple(var.dims[d] for d in ['lat', 'lon']) if self.mask is None: @@ 
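
# mean_climate_metrics_to_json() above writes result_dict using
# json_structure = ["model", "reference", "rip", "region", "statistic", "season"], so the
# dictionary it expects is nested in that order.  A minimal example with placeholder
# values (the model, reference, statistic and season keys below are illustrative):
result_dict = {
    "RESULTS": {
        "E3SM-2-0": {                      # model
            "default": {                   # reference
                "r1i1p1f1": {              # rip (realization)
                    "global": {            # region
                        "rms_xy": {        # statistic
                            "ann": 0.98,   # season
                            "djf": 1.12,
                        }
                    }
                }
            }
        }
    }
}
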
-423,19 +414,8 @@ def set_target_grid_and_mask_in_var(self, var, var_in_file): self: object self(): string, path to input file """ - print('jwlee-test-regrid, set_target_grid_and_mask_in_var start') if self.target_grid is not None: - print('jwlee-test-regrid, type(self):', type(self)) - print('jwlee-test-regrid, type(self()):', type(self())) - print('jwlee-test-regrid, self():', self()) - print('jwlee-test-regrid, regridder start, var_in_file:', var_in_file) - var.to_netcdf(self().split('/')[-1].split('.nc')[0] + '_test1-org.nc') var = var.regridder.horizontal(var_in_file, self.target_grid, tool=self.regrid_tool) - print('jwlee-test-regrid, regridder done') - var.to_netcdf(self().split('/')[-1].split('.nc')[0] + '_test2-regridded.nc') - print('jwlee-test-regrid-2, var[var_in_file].shape:', var[var_in_file].shape) - print('jwlee-test-regrid-3, self.target_mask:', self.target_mask) - if self.target_mask is not None: # if self.target_mask.shape != var.shape: if self.target_mask.shape != var[var_in_file].shape: @@ -443,9 +423,6 @@ def set_target_grid_and_mask_in_var(self, var, var_in_file): else: mask = self.target_mask var = MV2.masked_where(mask, var) - - print('jwlee-test-regrid-4, set_target_grid_and_mask_in_var done') - return var def set_domain_in_var(self, var, region): @@ -472,12 +449,8 @@ def set_file_mask_template(self): def get_mask_from_var(self, var): try: - print('jwlee-test-get_mask_from_var start') - #o_mask = self.file_mask_template.get("sftlf") + # o_mask = self.file_mask_template.get("sftlf") o_mask = self.file_mask_template.get("sftlf", var_in_file="sftlf") - print('jwlee-test-get_mask_from_var, self.file_mask_template:', self.file_mask_template) - print('jwlee-test-get_mask_from_var, type(o_mask):', type(o_mask)) - print('jwlee-test-get_mask_from_var, o_mask.shape:', o_mask.shape) except Exception: o_mask = ( cdutil.generateLandSeaMask(var, regridTool=self.regrid_tool).filled(1.0) @@ -492,7 +465,6 @@ def set_target_grid(self, target, regrid_tool="esmf", regrid_method="linear"): self.regrid_tool = regrid_tool self.regrid_method = regrid_method if target == "2.5x2.5": - print('jwlee-test, set_target_grid, start') # self.target_grid = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) self.target_grid = xcdat.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) self.target_grid_name = target From 308ee276752e3ec1ad90b13dfc8c324210145b40 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 8 Dec 2022 17:50:41 -0800 Subject: [PATCH 067/130] clean up --- pcmdi_metrics/io/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pcmdi_metrics/io/base.py b/pcmdi_metrics/io/base.py index b06f8c49d..5dab0621c 100755 --- a/pcmdi_metrics/io/base.py +++ b/pcmdi_metrics/io/base.py @@ -401,7 +401,7 @@ def mask_var(self, var): if self.mask is None: self.set_file_mask_template() self.mask = self.get_mask_from_var(var) - #if self.mask.shape != var.shape: + # if self.mask.shape != var.shape: if self.mask.shape != var_shape: dummy, mask = genutil.grower(var, self.mask) else: From 19bf52b5e562d8121e266cbb32b7e0ea3a3c879c Mon Sep 17 00:00:00 2001 From: lee1043 Date: Fri, 9 Dec 2022 21:04:12 -0800 Subject: [PATCH 068/130] clean up --- pcmdi_metrics/mean_climate/lib/compute_metrics.py | 10 ++++------ pcmdi_metrics/mean_climate/lib/compute_statistics.py | 10 +++++----- pcmdi_metrics/mean_climate/mean_climate_driver.py | 5 +++-- pcmdi_metrics/version.py | 4 ++-- 4 files changed, 14 insertions(+), 15 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/compute_metrics.py 
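
# The masking above keeps the sftlf-based land/sea selection: sftlf is a land-area
# fraction in percent (about 100 over land, 0 over ocean), and region "value" entries of
# 100/0 pick land-only/ocean-only points.  A hedged xarray sketch of that idea on
# synthetic data (the 50 percent threshold is illustrative):
import numpy as np
import xarray as xr

lat = np.arange(-88.75, 90, 2.5)
lon = np.arange(1.25, 360, 2.5)
coords = {"lat": lat, "lon": lon}
sftlf = xr.DataArray(np.random.uniform(0, 100, (lat.size, lon.size)), coords=coords, dims=("lat", "lon"))
tas = xr.DataArray(288.0 + np.random.randn(lat.size, lon.size), coords=coords, dims=("lat", "lon"))

tas_land = tas.where(sftlf >= 50)    # land-only (region value 100)
tas_ocean = tas.where(sftlf < 50)    # ocean-only (region value 0)
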
b/pcmdi_metrics/mean_climate/lib/compute_metrics.py index e662f10eb..bfee1e0b2 100644 --- a/pcmdi_metrics/mean_climate/lib/compute_metrics.py +++ b/pcmdi_metrics/mean_climate/lib/compute_metrics.py @@ -105,14 +105,14 @@ def compute_metrics(Var, dm, do): print('compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS') dm_am_devzm = dm_am - dm_amzm do_am_devzm = do_am - do_amzm - rms_xy_devzm = pcmdi_metrics.mean_climate.lib.rms_xy(dm_am_devzm, do_am_devzm, var) + rms_xy_devzm = pcmdi_metrics.mean_climate.lib.rms_xy(dm_am_devzm, do_am_devzm, var, weights=dm.spatial.get_weights(axis=['X', 'Y'])) # CALCULATE ANNUAL AND ZONAL MEAN STD # CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD print('compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD') - stdObs_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(do_am_devzm, var) - std_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(dm_am_devzm, var) + stdObs_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(do_am_devzm, var, weights=do.spatial.get_weights(axis=['X', 'Y'])) + std_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(dm_am_devzm, var, weights=dm.spatial.get_weights(axis=['X', 'Y'])) for stat in [ "std-obs_xy", @@ -182,9 +182,7 @@ def compute_metrics(Var, dm, do): metrics_dictionary["mae_xy"][sea] = format(mae_sea * conv, sig_digits) metrics_dictionary["std-obs_xy"][sea] = format(stdObs_xy_sea * conv, sig_digits) metrics_dictionary["std_xy"][sea] = format(std_xy_sea * conv, sig_digits) - metrics_dictionary["mean-obs_xy"][sea] = format( - meanObs_xy_sea * conv, sig_digits - ) + metrics_dictionary["mean-obs_xy"][sea] = format(meanObs_xy_sea * conv, sig_digits) metrics_dictionary["mean_xy"][sea] = format(mean_xy_sea * conv, sig_digits) rms_mo_l = [] diff --git a/pcmdi_metrics/mean_climate/lib/compute_statistics.py b/pcmdi_metrics/mean_climate/lib/compute_statistics.py index 09fec66bb..a5abdfcaa 100644 --- a/pcmdi_metrics/mean_climate/lib/compute_statistics.py +++ b/pcmdi_metrics/mean_climate/lib/compute_statistics.py @@ -169,7 +169,7 @@ def rms_0(dm, do, var=None, weighted=True): return float(stat) -def rms_xy(dm, do, var=None): +def rms_xy(dm, do, var=None, weights=None): """Computes rms""" if dm is None and do is None: # just want the doc return { @@ -178,7 +178,8 @@ def rms_xy(dm, do, var=None): "Contact": "pcmdi-metrics@llnl.gov", } dif_square = (dm[var] - do[var])**2 - weights = dm.spatial.get_weights(axis=['X', 'Y']) + if weights is None: + weights = dm.spatial.get_weights(axis=['X', 'Y']) stat = math.sqrt(dif_square.weighted(weights).mean(("lon", "lat"))) return float(stat) @@ -225,11 +226,10 @@ def std_xy(d, var=None, weights=None): "Abstract": "Compute Spatial Standard Deviation", "Contact": "pcmdi-metrics@llnl.gov", } - - average = float(d.spatial.average(var, axis=['X', 'Y'])[var].values) - anomaly = (d[var] - average)**2 if weights is None: weights = d.spatial.get_weights(axis=['X', 'Y']) + average = float(d[var].weighted(weights).mean(("lon", "lat"))) + anomaly = (d[var] - average)**2 variance = float(anomaly.weighted(weights).mean(("lon", "lat"))) std = math.sqrt(variance) return float(std) diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py index f0fe61238..8e9002ea4 100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver.py @@ -201,7 +201,8 @@ def main(): print('compute metrics start') result_dict["RESULTS"][model][ref][run][region] = compute_metrics(varname, ds_test_dict[region], 
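
# rms_xy/std_xy above now take explicit area weights (ds.spatial.get_weights(axis=["X", "Y"]))
# and reduce with DataArray.weighted(...).mean(("lon", "lat")).  A self-contained sketch of
# the same arithmetic, using simple cos(latitude) weights in place of xcdat bounds-derived
# weights so it runs on synthetic fields:
import numpy as np
import xarray as xr

lat = np.arange(-88.75, 90, 2.5)
lon = np.arange(1.25, 360, 2.5)
coords = {"lat": lat, "lon": lon}
model = xr.DataArray(np.random.randn(lat.size, lon.size), coords=coords, dims=("lat", "lon"))
obs = xr.DataArray(np.random.randn(lat.size, lon.size), coords=coords, dims=("lat", "lon"))
weights = np.cos(np.deg2rad(model.lat))              # stand-in for the area weights

rms_xy = float(np.sqrt(((model - obs) ** 2).weighted(weights).mean(("lat", "lon"))))
mean_xy = float(model.weighted(weights).mean(("lat", "lon")))
std_xy = float(np.sqrt(((model - mean_xy) ** 2).weighted(weights).mean(("lat", "lon"))))
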
ds_ref_dict[region]) - # write individual JSON for single model (multi realizations if exist) / single obs (need to accumulate later) / single variable + # write individual JSON + # --- single model (multi realizations if exist) / single obs (need to accumulate later) / single variable json_filename_tmp = "_".join([model, var, target_grid, regrid_tool, regrid_method, "metrics"]) mean_climate_metrics_to_json( os.path.join(metrics_output_path, var), @@ -212,7 +213,7 @@ def main(): cmec_flag=cmec, ) - # write collective JSON for all models / all obs / single variable + # write collective JSON --- all models / all obs / single variable json_filename = "_".join([var, target_grid, regrid_tool, regrid_method, "metrics"]) mean_climate_metrics_to_json( metrics_output_path, diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 7c3da6851..192a9f611 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-75-ge8b8a29' -__git_sha1__ = 'e8b8a29293daacacec78ac1baee2f4c2f4cd3442' +__git_tag_describe__ = 'v2.3.1-79-g308ee27' +__git_sha1__ = '308ee276752e3ec1ad90b13dfc8c324210145b40' From e52ff9bcfe7600d0d52e352cf9630d5d458fc1e0 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Tue, 3 Jan 2023 12:32:39 -0800 Subject: [PATCH 069/130] clean up --- .../mean_climate/lib/compute_metrics.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/compute_metrics.py b/pcmdi_metrics/mean_climate/lib/compute_metrics.py index bfee1e0b2..347e3ba50 100644 --- a/pcmdi_metrics/mean_climate/lib/compute_metrics.py +++ b/pcmdi_metrics/mean_climate/lib/compute_metrics.py @@ -253,13 +253,9 @@ def compute_metrics(Var, dm, do): # ZONAL AND SEASONAL MEAN CONTRIBUTIONS -# metrics_dictionary[ 'rms_y'][ sea] = format( -# rms_y * -# conv, -# sig_digits) -# metrics_dictionary[ 'rms_devzm'][ sea] = format( -# rms_xy_devzm * -# conv, -# sig_digits) - -# return metrics_dictionary +# metrics_dictionary['rms_y'][sea] = format( +# rms_y * conv, +# sig_digits) +# metrics_dictionary['rms_devzm'][sea] = format( +# rms_xy_devzm * conv, +# sig_digits) From da4f2dd448c8941e13595882f161bdbbc7345fbf Mon Sep 17 00:00:00 2001 From: lee1043 Date: Tue, 3 Jan 2023 12:40:15 -0800 Subject: [PATCH 070/130] clean up --- .../mean_climate/param/basic_param.py | 35 +++++++++++++------ 1 file changed, 24 insertions(+), 11 deletions(-) diff --git a/pcmdi_metrics/mean_climate/param/basic_param.py b/pcmdi_metrics/mean_climate/param/basic_param.py index 8148c390b..c2d68dae1 100644 --- a/pcmdi_metrics/mean_climate/param/basic_param.py +++ b/pcmdi_metrics/mean_climate/param/basic_param.py @@ -8,7 +8,7 @@ # RUN IDENTIFICATION # DEFINES A SUBDIRECTORY TO METRICS OUTPUT RESULTS SO MULTIPLE CASES CAN # BE COMPARED -case_id = 'v20221025' +case_id = 'v20221130' # LIST OF MODEL VERSIONS TO BE TESTED - WHICH ARE EXPECTED TO BE PART OF # CLIMATOLOGY FILENAME @@ -17,7 +17,9 @@ # VARIABLES TO USE # vars = ['pr', 'ua_850'] -vars = ['pr'] +# vars = ['pr'] +# vars = ['ta-850'] +vars = ['ua-850'] # Observations to use at the moment "default" or "alternate" @@ -30,11 +32,11 @@ regrid_tool = 'regrid2' # 'regrid2' # OPTIONS: 'regrid2','esmf' # OPTIONS: 'linear','conservative', only if tool is esmf regrid_method = 'linear' -regrid_tool_ocn = 'esmf' # OPTIONS: "regrid2","esmf" +regrid_tool_ocn = 'esmf' # OPTIONS: "regrid2","esmf" # OPTIONS: 'linear','conservative', only if tool is esmf regrid_method_ocn = 'linear' -# SAVE INTERPOLATED 
MODEL CLIMATOLOGIES ? +# SAVE INTERPOLATED MODEL CLIMATOLOGIES? save_test_clims = True # True or False # DIRECTORY WHERE TO PUT INTERPOLATED MODELS' CLIMATOLOGIES @@ -43,21 +45,32 @@ # Templates for climatology files # %(param) will subsitute param with values in this file -filename_template = "cmip6.historical.E3SMv2.r1i1p1.mon.%(variable).198101-200512.AC.v20221020.nc" +filename_template = "cmip6.historical.E3SMv2.r1i1p1.mon.%(variable).198101-200512.AC.v20221027.nc" # filename template for landsea masks ('sftlf') -sftlf_filename_template = "sftlf_fx_E3SM-1-0_historical_r1i1p1f1_gr.nc" +# sftlf_filename_template = "sftlf_fx_E3SM-1-0_historical_r1i1p1f1_gr.nc" +# sftlf_filename_template = "/p/user_pub/work/CMIP6/CMIP/E3SM-Project/E3SM-2-0/piControl/r1i1p1f1/fx/sftlf/gr/v20220913/sftlf_fx_E3SM-2-0_piControl_r1i1p1f1_gr.nc" +sftlf_filename_template = "sftlf_fx_E3SM-2-0_piControl_r1i1p1f1_gr.nc" +# sftlf_filename_template = None generate_sftlf = False # if land surface type mask cannot be found, generate one - -# Region -regions = {"pr": ["global"], - "ua_850": ["global"]} +# generate_sftlf = True # if land surface type mask cannot be found, generate one + +# Region (if not given, default region applied: global, NHEX, SHEX, TROPICS) +regions = { + # "pr": ["global", "NHEX", "SHEX", "TROPICS", "land_NHEX", "ocean_SHEX"], + "pr": ["global"], + # "pr": ["land", "ocean", "land_TROPICS", "ocean_SHEX"], + "ua": ["global"], + "ta": ["global", "NHEX", "SHEX", "TROPICS", "land_NHEX", "ocean_SHEX"], + # "ta": ["NHEX"], + # "ta": ["land_NHEX"] + # "ta": ["global"] +} # ROOT PATH FOR MODELS CLIMATOLOGIES # test_data_path = '/work/lee1043/ESGF/E3SMv2/atmos/mon' test_data_path = './clim' - # ROOT PATH FOR OBSERVATIONS # Note that atm/mo/%(variable)/ac will be added to this reference_data_path = '/p/user_pub/PCMDIobs/obs4MIPs_clims' From 06965167ebd895399311f4057980b16c78f55397 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Fri, 27 Jan 2023 19:42:17 -0800 Subject: [PATCH 071/130] add CONUS domain --- pcmdi_metrics/io/default_regions_define.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pcmdi_metrics/io/default_regions_define.py b/pcmdi_metrics/io/default_regions_define.py index 18dac1193..da8393fe0 100755 --- a/pcmdi_metrics/io/default_regions_define.py +++ b/pcmdi_metrics/io/default_regions_define.py @@ -19,6 +19,8 @@ def load_regions_specs(): "ocean_SHEX": {"value": 0, "domain": {"latitude": (-90.0, -30)}}, "ocean_TROPICS": {"value": 0, "domain": {"latitude": (30.0, 30)}}, "ocean": {"value": 0}, + "CONUS": {"domain": {"latitude": (24.7, 49.4), "longitude": (-124.78, -66.92)}}, + "land_CONUS": {"value": 100, "domain": {"latitude": (24.7, 49.4), "longitude": (-124.78, -66.92)}}, # Modes of variability "NAM": {"domain": {"latitude": (20.0, 90), "longitude": (-180, 180)}}, "NAO": {"domain": {"latitude": (20.0, 80), "longitude": (-90, 40)}}, From 5fd5243a5f548a89d8177ad829e3aff81cc73e34 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Mon, 30 Jan 2023 09:39:16 -0800 Subject: [PATCH 072/130] clean up --- .../scripts/allvars_parallel_mod_clims.py | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py diff --git a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py new file mode 100644 index 000000000..6b415768d --- /dev/null +++ b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py @@ -0,0 +1,55 @@ +import datetime +import glob 
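
# The CONUS entries above follow the regions_specs convention used here: an optional
# "value" giving the sftlf criterion (100 = land, 0 = ocean) plus a latitude/longitude
# "domain".  Another region could be added the same way; the "MED" entries below are a
# hypothetical example, not ones that exist in default_regions_define.py:
regions_specs_extra = {
    "MED": {"domain": {"latitude": (30.0, 45.0), "longitude": (-10.0, 40.0)}},
    "land_MED": {"value": 100, "domain": {"latitude": (30.0, 45.0), "longitude": (-10.0, 40.0)}},
}
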
+import os + +from pcmdi_metrics.misc.scripts import parallel_submitter + +exp = 'historical' +# exp = 'amip' +mip = 'cmip6' +verin = 'v20220924' #'v20210731' #'v20201226' +start = '1981-01' +end = '2005-12' +numw = 35 # None #35 +verout = datetime.datetime.now().strftime('v%Y%m%d') + +# vars = ['rlut', 'tas', 'pr'] +# vars = ['ts', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'pr', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs'] +# vars = ['ta', 'ua', 'va', 'zg', 'hur', 'hus'] +# vars = ['ts', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'pr', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs', 'ta', 'ua', 'va', 'zg', 'hur', 'hus'] +vars = ['ts'] + +lst1 = [] +listlog = [] + +for var in vars: + pin = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/' + verin + '/' + mip + '/' + exp + '/atmos/mon/' + var + '/' + + lst = sorted(glob.glob(pin + '*r1i1p1f1*.xml')) + + pathout_base = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/' + mip + '/' + exp + '/' + pathoutdir = os.path.join(pathout_base, verout, var) + + os.makedir(pathoutdir, exist_ok=True) + + for li in lst: + + print(li.split('.')) + mod = li.split('.')[4] + rn = li.split('.')[5] + vv = li.split('.')[7] + + outfilename = mip + '.' + exp + '.' + mod + '.r1i1p1f1.mon.' + var + '.nc' + cmd0 = "pcmdi_compute_climatologies.py --start " + start + " --end " + end + " --infile " + + pathout = pathoutdir + '/' + outfilename + cmd = cmd0 + li + ' --outfile ' + pathout + ' --var ' + var + + lst1.append(cmd) + logf = mod + '.' + rn + '.' + vv + '.txt' + listlog.append(logf) + print(logf) + +print('Number of jobs starting is ', str(len(lst1))) +parallel_submitter(lst1, log_dir='./logs', logfilename_list=listlog, num_workers=numw) +print('done submitting') From 9939d2e22c732e9e3f3fab60383ebef0d0641a85 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Mon, 30 Jan 2023 09:39:31 -0800 Subject: [PATCH 073/130] clean up --- pcmdi_metrics/io/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pcmdi_metrics/io/base.py b/pcmdi_metrics/io/base.py index 5dab0621c..099b87937 100755 --- a/pcmdi_metrics/io/base.py +++ b/pcmdi_metrics/io/base.py @@ -177,7 +177,7 @@ def write( if not os.path.exists(dir_path): try: - os.makedirs(dir_path) + os.makedirs(dir_path, exist_ok=True) except Exception: logging.getLogger("pcmdi_metrics").error( "Could not create output directory: %s" % dir_path From 3124ecb0db4ab8b5872352abf4d67bf51669a898 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Mon, 30 Jan 2023 09:57:13 -0800 Subject: [PATCH 074/130] clean up, typo fix --- .../mean_climate/scripts/allvars_parallel_mod_clims.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py index 6b415768d..35998b66d 100644 --- a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py +++ b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py @@ -30,7 +30,7 @@ pathout_base = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/' + mip + '/' + exp + '/' pathoutdir = os.path.join(pathout_base, verout, var) - os.makedir(pathoutdir, exist_ok=True) + os.makedirs(pathoutdir, exist_ok=True) for li in lst: From fd872d02717db633e70bc34142beb815f7d4a2f0 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Mon, 30 Jan 2023 
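
# allvars_parallel_mod_clims.py above builds one pcmdi_compute_climatologies.py command
# per model file and hands the list to parallel_submitter.  A sketch of a single command
# string in the same form (the input/output paths and model name are placeholders):
var, start, end = "ts", "1981-01", "2005-12"
infile = "/some/xml/dir/cmip6.historical.E3SM-2-0.r1i1p1f1.mon.ts.xml"
outfile = "/some/out/dir/cmip6.historical.E3SM-2-0.r1i1p1f1.mon.ts.nc"
cmd = (
    "pcmdi_compute_climatologies.py"
    f" --start {start} --end {end}"
    f" --infile {infile}"
    f" --outfile {outfile}"
    f" --var {var}"
)
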
11:11:49 -0800 Subject: [PATCH 075/130] add some memo --- pcmdi_metrics/mean_climate/scripts/README.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 pcmdi_metrics/mean_climate/scripts/README.md diff --git a/pcmdi_metrics/mean_climate/scripts/README.md b/pcmdi_metrics/mean_climate/scripts/README.md new file mode 100644 index 000000000..f252c3c0e --- /dev/null +++ b/pcmdi_metrics/mean_climate/scripts/README.md @@ -0,0 +1 @@ +* allvars_parallel_mod_clims.py: PCMDI internal script to generate annual cycle netCDF files as the first step for mean climate metrics calculation From 9bf6c38fa1cb2a5ba97c20189a3877ad37cf354f Mon Sep 17 00:00:00 2001 From: lee1043 Date: Tue, 31 Jan 2023 11:05:42 -0800 Subject: [PATCH 076/130] pcmdi internal parameter, initial commit after clean up. Further clean up very much needed --- .../param/pcmdi_MIP_EXP_pmp_parameterfile.py | 228 ++++++++++++++++++ 1 file changed, 228 insertions(+) create mode 100755 pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py diff --git a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py new file mode 100755 index 000000000..535137e19 --- /dev/null +++ b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py @@ -0,0 +1,228 @@ +import datetime +import json +import os +import sys + +import cdutil + +ver = datetime.datetime.now().strftime('v%Y%m%d') + +# ############################################################################### +# OPTIONS ARE SET BY USER IN THIS FILE AS INDICATED BELOW BY: +# ############################################################################### +case_id = ver + +# MIP = 'cmip6' # 'CMIP6' +MIP = 'cmip5' # 'CMIP6' +exp = 'historical' +# exp = 'amip' +# exp = 'picontrol' + +user_notes = "Provenance and results" +metrics_in_single_file = 'y' # 'y' or 'n' +regional = 'n' # 'n' + +cmec = False # True + +# ################################################################ + +if MIP == 'cmip6': + modver = 'v20220928' +if MIP == 'cmip5': + modver = 'v20220928' + if exp == 'historical': + modver = 'v20220928' + +# LIST OF MODEL VERSIONS TO BE TESTED - WHICH ARE EXPECTED TO BE PART OF CLIMATOLOGY FILENAME + +# all_mods_dic= json.load(open('all_mip_mods-v20220927_2.json')) #all_mip_mods-v20200528.json')) +all_mods_dic = json.load(open('all_mip_mods-v20220928.json')) #all_mip_mods-v20200528.json')) +# all_mods_dic = ['E3SM-1-0', 'ACCESS-CM2'] + +# test_data_set = all_mods_dic +test_data_set = all_mods_dic[MIP][exp] +test_data_set.sort() + +print(len(test_data_set), ' ', test_data_set) +print('----------------------------------------------------------------') + +simulation_description_mapping = {"creation_date": "creation_date", "tracking_id": 'tracking_id', } + +# VARIABLES AND OBSERVATIONS TO USE + +realm = 'Amon' +# realm = 'Omon' + +# #################### +# WITHOUT PARALLELIZATION + +''' +vars = ['pr', 'rltcre', 'rstcre', 'rt', 'rst', 'rlut', 'tauu', 'tauv'] +vars = ['ts', 'psl', 'tauu', 'tauv', 'tas', 'ta_850', 'ta_200', 'ua_850', 'ua_200', 'va_850', 'va_200', 'zg_500', 'pr', 'rltcre', 'rstcre', 'rt', 'rst', 'rlut'] +vars = ['tas', 'rlut', 'pr', 'ta_850', 'ta_200', 'ua_850', 'ua_200', 'va_850', 'va_200', 'zg_500'] +''' + +if regional == 'y': + vars = ['tas', 'ts', 'psl', 'sfcWind'] #, 'tauu', 'tauv'] ## THESE DO NOT WORK WITH PARALLELIZATON + # vars = ['tas'] + +# #################### +# WITH PARALLELIZATION + +''' +vars = [['psl', ], ['pr', ], ['prw', ], ['tas', ], ['uas', ], ['vas', ], 
['sfcWind', ], ['tauu'], ['tauv']] +#vars = [['ta_850', ], ['ta_200', ], ['ua_850', ], ['ua_200', ], ['va_850', ], ['va_200', ], ['zg_500']] +vars = [['rlut', ], ['rsut', ], ['rsutcs', ], ['rlutcs', ], ['rsdt', ], ['rsus', ], ['rsds', ], ['rlds', ], ['rlus', ], ['rldscs', ], ['rsdscs']] +''' +# ALL BUT NOT tas ts psl sfcwind tauu tauv +if regional == 'n': + vars = [['pr', ], ['prw', ], ['uas', ], ['vas', ], ['ta_850', ], ['ta_200', ], ['ua_850', ], ['ua_200', ], ['va_850', ], ['va_200', ], ['zg_500'], ['rlut', ], ['rsut', ], ['rsutcs', ], ['rlutcs', ], ['rsdt', ], ['rsus', ], ['rsds', ], ['rlds', ], ['rlus', ], ['rldscs', ], ['rsdscs'], ['rltcre', ], ['rstcre', ], ['rt', ]] + +# vars = [['pr', ], ['rlut', ], ] +# vars = [['ts', ], ['psl', ]] + +# ################### +# vars = ['ts'] + +# MODEL SPECIFIC PARAMETERS +model_tweaks = { + # Keys are model accronym or None which applies to all model entries + None: {"variable_mapping": {"rlwcrf1": "rlutcre1"}}, # Variables name mapping + "GFDL-ESM2G": {"variable_mapping": {"tos": "tos"}}, +} + + +# USER CUSTOMIZED REGIONS +if regional == regional: # 'y': + regions_specs = { + "Nino34": {"value": 0., "domain": cdutil.region.domain(latitude=(-5., 5.), longitude=(190., 240.))}, + 'ocean': {"value": 0., 'domain': cdutil.region.domain(latitude=(-90., 90))}, + 'land': {"value": 100., 'domain': cdutil.region.domain(latitude=(-90., 90))}, + 'ocean_50S50N': {"value": 0., 'domain': cdutil.region.domain(latitude=(-50., 50))}, + 'ocean_50S20S': {"value": 0., 'domain': cdutil.region.domain(latitude=(-50., -20))}, + 'ocean_20S20N': {"value": 0., 'domain': cdutil.region.domain(latitude=(-20., 20))}, + 'ocean_20N50N': {"value": 0., 'domain': cdutil.region.domain(latitude=(20., 50))}, + 'ocean_50N90N': {"value": 0., 'domain': cdutil.region.domain(latitude=(50., 90))}, + '90S50S': {"value": None, 'domain': cdutil.region.domain(latitude=(-90., -50))}, + '50S20S': {"value": None, 'domain': cdutil.region.domain(latitude=(-50., -20))}, + '20S20N': {"value": None, 'domain': cdutil.region.domain(latitude=(-20., 20))}, + '20N50N': {"value": None, 'domain': cdutil.region.domain(latitude=(20., 50))}, + '50N90N': {"value": None, 'domain': cdutil.region.domain(latitude=(50., 90))}, + 'NH': {"value": None, 'domain': cdutil.region.domain(latitude=(0., 90))}, + 'SH': {"value": None, 'domain': cdutil.region.domain(latitude=(-90., 0))}, + 'NHEX_ocean': {"value": 0., 'domain': cdutil.region.domain(latitude=(0., 90))}, + 'SHEX_ocean': {"value": 0., 'domain': cdutil.region.domain(latitude=(-90., 0))}, + 'NHEX_land': {"value": 100., 'domain': cdutil.region.domain(latitude=(20., 90))}, + 'SHEX_land': {"value": 100., 'domain': cdutil.region.domain(latitude=(-90., -20.))}} + # 'GLOBAL': {"value": 0., 'domain': cdutil.region.domain(latitude=(-90., 90.))}, + +regions = { + "tas": [None, "land", "ocean", "ocean_50S50N", "NHEX_land", "SHEX_land"], + "tauu": [None, "ocean_50S50N"], + "tauv": [None, "ocean_50S50N"], + "psl": [None, "ocean", "ocean_50S50N", "NHEX_ocean", "SHEX_ocean"], + "sfcWind": [None, "ocean", "ocean_50S50N", "NHEX_ocean", "SHEX_ocean"], + "ts": [None, "ocean", "ocean_50S50N", "NHEX_ocean", "SHEX_ocean"], + "tos": [None]} + +# USER CAN CUSTOMIZE REGIONS VALUES NAMES +# regions_values = {"land": 100., "ocean": 0.} + +# Observations to use at the moment "default" or "alternate" +ref = 'all' +reference_data_set = ['default'] # ['default'] #, 'alternate1'] #, 'alternate', 'ref3'] +ext = '.xml' #'.nc' +ext = '.nc' + +# INTERPOLATION OPTIONS + +target_grid = '2.5x2.5' # OPTIONS: 
'2.5x2.5' or an actual cdms2 grid object +targetGrid = target_grid +target_grid_string = '2p5x2p5' +regrid_tool = 'regrid2' # 'esmf' #'regrid2' # OPTIONS: 'regrid2', 'esmf' +regrid_method = 'regrid2' # 'conservative' #'linear' # OPTIONS: 'linear', 'conservative', only if tool is esmf +regrid_tool_ocn = 'esmf' # OPTIONS: "regrid2", "esmf" +regrid_method_ocn = 'conservative' # OPTIONS: 'linear', 'conservative', only if tool is esmf + +# regrid_tool = 'esmf' #'esmf' #'regrid2' # OPTIONS: 'regrid2', 'esmf' +# regrid_method = 'linear' #'conservative' #'linear' # OPTIONS: 'linear', 'conservative', only if tool is esmf + +# SIMULATION PARAMETERg +period = '1981-2005' +# period = '1979-1989' + +realization = 'r1i1p1' + +# SAVE INTERPOLATED MODEL CLIMATOLOGIES ? +save_test_clims = True # True or False + +# DATA LOCATION: MODELS, OBS AND METRICS OUTPUT +# ################################################ +# Templates for climatology files + +verd = '*' +if exp == 'amip': + filename_template = "%(variable)_%(model_version)_%(table)_amip_%(exp)r1i1p1_198101-200512-clim.nc" +if exp == 'amip': + filename_template = "CMIP5.amip.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20190225.nc" +# if exp == 'historical': + # filename_template = "CMIP5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20190307.nc" +if exp == 'historical' and MIP == 'cmip5': + filename_template = MIP + '.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.' + modver + '.nc' +if exp == 'amip' and MIP == 'cmip5': + filename_template = MIP + '.amip.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.' + modver + '.nc' +if exp == 'historical' and MIP == 'cmip6': + filename_template = MIP + '.historical.%(model_version).r1i1p1f1.mon.%(variable).198101-200512.AC.' + modver + '.nc' +if exp == 'amip' and MIP == 'cmip6': + filename_template = MIP + '.amip.%(model_version).r1i1p1f1.mon.%(variable).198101-200512.AC.' 
+ modver + '.nc' + +# if exp == 'historical': + # filename_template = "CMIP5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.%('*').nc" +if exp == 'picontrol': + filename_template = "%(variable)_%(model_version)_%(table)_picontrol_%(exp)r1i1p1_01-12-clim.nc" + +# Templates for MODEL land/sea mask (sftlf) +# filename template for landsea masks ('sftlf') +# sftlf_filename_template = "/work/gleckler1/processed_data/cmip5_fixed_fields/sftlf/sftlf_%(model_version).nc" + +generate_sftlf = True # ESTIMATE LAND SEA MASK IF NOT FOUND + +sftlf_filename_template = "cmip6.historical.%(model_version).sftlf.nc" # "sftlf_%(model_version).nc" + +# ROOT PATH FOR MODELS CLIMATOLOGIES +test_data_path = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/' + MIP + '/' + exp + '/' + modver + '/%(variable)/' + +# ROOT PATH FOR OBSERVATIONS +# reference_data_path = '/work/gleckler1/processed_data/obs/' +reference_data_path = '/p/user_pub/PCMDIobs/obs4MIPs_clims/' +# custom_observations = os.path.abspath('/p/user_pub/PCMDIobs/catalogue/obs4MIPs_PCMDI_clims_byVar_catalogue_v20210805.json') +custom_observations = './obs4MIPs_PCMDI_clims_byVar_catalogue_v20210805_ljw.json' + +print('CUSTOM OBS ARE ', custom_observations) +if not os.path.exists(custom_observations): + sys.exit() + +# ###################################### +# DIRECTORY AND FILENAME FOR OUTPUTING METRICS RESULTS +# BY INDIVIDUAL MODELS +if metrics_in_single_file != 'y': + metrics_output_path = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/metrics_results/mean_climate/' + MIP + '/' + exp + '/%(case_id)/%(variable)%(level)/' # INDIVIDUAL MOD FILES + output_json_template = '%(model_version).%(variable)%(level).' + MIP + '.' + exp + '.%(regrid_method).' + target_grid_string + '.' + case_id # INDIVIDUAL MOD FILES +# ALL MODELS IN ONE FILE +if metrics_in_single_file == 'y': + metrics_output_path = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/metrics_results/mean_climate/' + MIP + '/' + exp + '/%(case_id)/' # All SAME FILE + output_json_template = '%(variable)%(level).' + MIP + '.' + exp + '.%(regrid_method).' + target_grid_string + '.' + case_id # ALL SAME FILE +# ####################################### + +# DIRECTORY WHERE TO PUT INTERPOLATED MODELS' CLIMATOLOGIES +test_clims_interpolated_output = '/work/gleckler1/processed_data/metrics_package/pmp_diagnostics' + '/interpolated_model_clims_' + exp + '/' + case_id +test_clims_interpolated_output = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results' + '/interpolated_model_clims_' + exp + '/' + case_id + +test_clims_interpolated_output = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results' + '/interpolated_model_clims/' + MIP + '/' + exp + '/' + case_id + +# FILENAME FOR INTERPOLATED CLIMATOLGIES OUTPUT +filename_output_template = MIP + ".%(model_version)." + exp + ".r1i1p1.mo.%(variable)%(level).%(period).interpolated.%(regrid_method).%(region).AC." 
+ case_id + "%(ext)" + +if regional == 'n': + num_workers = 20 # 17 + granularize = ["vars"] From ed975ac1b70f190de2bbf1578c3bc11931d3ce65 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Tue, 31 Jan 2023 11:31:22 -0800 Subject: [PATCH 077/130] add internal usage script to the repo --- .../scripts/get_all_MIP_mods_from_CLIMS.py | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100755 pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py diff --git a/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py b/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py new file mode 100755 index 000000000..811e8864e --- /dev/null +++ b/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py @@ -0,0 +1,34 @@ +import glob +import json + +ver = 'v20230130' + +pin = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/%(MIP)/%(EXP)/' + ver + '/ts/' + +# MIPS = ['cmip6', 'cmip5'] +# exps = ['historical', 'amip'] + +MIPS = ['cmip6'] +exps = ['historical'] + +mod_dic = {} + +for mip in MIPS: + mod_dic[mip] = {} + for exp in exps: + ptmp = pin.replace('%(MIP)', mip).replace('%(EXP)', exp) + print('MIP: ', mip) + print('exp: ', exp) + print('dir: ', ptmp) + + lst = sorted(glob.glob(ptmp + '*.r1*.AC.' + ver + '.nc')) + mods = [] + for li in lst: + mod = li.split('.')[4] + if mod not in mods: + mods.append(mod) + + print(mods) + mod_dic[mip][exp] = sorted(mods) + +json.dump(mod_dic, open('all_mip_mods-' + ver + '.json', 'w'), indent=4, sort_keys=True) From c5d48f854098ea0fdb55cd5ebdafbc7865e4e700 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Tue, 31 Jan 2023 14:05:06 -0800 Subject: [PATCH 078/130] add more scripts and clean up --- .../param/pcmdi_MIP_EXP_pmp_parameterfile.py | 2 +- pcmdi_metrics/mean_climate/scripts/README.md | 11 +- .../mean_climate/scripts/mk_CRF_clims.py | 132 ++++++++++++++++++ 3 files changed, 143 insertions(+), 2 deletions(-) create mode 100755 pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py diff --git a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py index 535137e19..a4f3344eb 100755 --- a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py +++ b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py @@ -82,7 +82,7 @@ # vars = [['ts', ], ['psl', ]] # ################### -# vars = ['ts'] +vars = ['ts'] # MODEL SPECIFIC PARAMETERS model_tweaks = { diff --git a/pcmdi_metrics/mean_climate/scripts/README.md b/pcmdi_metrics/mean_climate/scripts/README.md index f252c3c0e..91b5ea957 100644 --- a/pcmdi_metrics/mean_climate/scripts/README.md +++ b/pcmdi_metrics/mean_climate/scripts/README.md @@ -1 +1,10 @@ -* allvars_parallel_mod_clims.py: PCMDI internal script to generate annual cycle netCDF files as the first step for mean climate metrics calculation +# Run PMP Mean Climate (PCMDI internal usage) + +## Generate annual cycle files +* `allvars_parallel_mod_clims.py`: PCMDI internal script to generate annual cycle netCDF files as the first step for mean climate metrics calculation +* `mk_CRF_clims.py`: after clims have been calculated the cloud radiative forcing (CRF) clims need to be calculated by combining radiation variables + +## Prepare run metrics calculations +* `get_all_MIP_mods_from_CLIMS.py`: Generate a json file that includes list of models, e.g., `all_mip_mods-v20230130.json` + +mean_climate_driver.py -p ../param/pcmdi_MIP_EXP_pmp_parameterfile.py diff --git 
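
# get_all_MIP_mods_from_CLIMS.py above writes a {mip: {experiment: [model, ...]}} JSON
# (all_mip_mods-<version>.json) that the parameter file earlier in this series reads back
# with json.load.  A minimal round trip showing that structure (model list shortened and
# illustrative):
import json

mod_dic = {"cmip6": {"historical": ["ACCESS-CM2", "E3SM-2-0"]}}
with open("all_mip_mods-v20230130.json", "w") as f:
    json.dump(mod_dic, f, indent=4, sort_keys=True)

all_mods_dic = json.load(open("all_mip_mods-v20230130.json"))
test_data_set = sorted(all_mods_dic["cmip6"]["historical"])
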
a/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py b/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py new file mode 100755 index 000000000..20448a84d --- /dev/null +++ b/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py @@ -0,0 +1,132 @@ +#!/usr/local/uvcdat/latest/bin/python + +import glob +import os + +import cdms2 as cdms +import MV2 as MV + +cdms.setAutoBounds('on') + +cdms.setNetcdfShuffleFlag(0) +cdms.setNetcdfDeflateFlag(0) +cdms.setNetcdfDeflateLevelFlag(0) + +exp = 'historical' +# exp = 'amip' + +MIP = 'cmip6' # 'CMIP6' +# MIP = 'cmip5' # 'CMIP5' + +if MIP == 'cmip6': + ver = 'v20220927' # v20210812' #'v20210806' #'v20200526' +if MIP == 'cmip5': + ver = 'v20200426' #'v20191016' #'v20190820' #'v20190307' + +# NEED TO RUN SEPERATELY FOR LW AND SW (i.e., rsut and rlut) +radvar = 'rsut' +# radvar = 'rlut' + +pit = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/' + MIP + '/' + exp + '/' + ver + '/' +pi = pit + radvar + 'cs/' + +lst = glob.glob(pi + '*' + radvar + 'cs' '*.nc') + +for lc in lst: + try: + l = lc.replace(radvar + 'cs', radvar) + + if os.path.isfile(l): + + if radvar == 'rsut': + fixname = 'rstcre' + elif radvar == 'rlut': + fixname = 'rltcre' + + os.makedirs(pi.replace(radvar + 'cs', fixname), exist_ok=True) + + f = cdms.open(l) + d = f(radvar) + fc = cdms.open(lc) + att_keys = fc.attributes.keys() + dc = fc(radvar + 'cs') + f.close() + fc.close() + + dgrid = d.getGrid() + + cre = MV.subtract(dc, d) + cre.setGrid(dgrid) + + cre.id = fixname + + cre.units = "W m-2" + + lo = l.replace(radvar, fixname) + + g = cdms.open(lo, 'w+') + for att in f.attributes.keys(): + setattr(g, att, f.attributes[att]) + g.write(cre) + g.close() + + print('done with ', lo) + + if radvar == 'rsut': + l1 = lc.replace('rsutcs', 'rsdt') # [:-1] + + try: + f1 = cdms.open(l1) + d1 = f1('rsdt') + # dif = -1.*d1 + dif = MV.subtract(d1, d) + + dif.units = 'W m-2' + dif.id = 'rst' + + l2 = l1.replace('rsdt', 'rst') + + os.makedirs(pit + '/rst', exist_ok=True) + + print('starting ', l2) + + g = cdms.open(l2, 'w+') + + for att in f1.attributes.keys(): + setattr(g, att, f1.attributes[att]) + g.write(dif) + + att_keys = f1.attributes.keys() + att_dic = {} + g.close() + f1.close() + + except Exception: + print('no rsdt ') # for ', l1 + + # ### AND FINALLY, THE NET + try: + lw = l2.replace('rst', 'rlut') + f3 = cdms.open(lw) + d3 = f3('rlut') + + net = MV.subtract(dif, d3) + net.id = 'rt' + + os.makedirs(pit + '/rt', exist_ok=True) + + ln = lw.replace('rlut', 'rt') + + g3 = cdms.open(ln, 'w+') + for att in f3.attributes.keys(): + setattr(g3, att, f3.attributes[att]) + + g3.write(net) + print('done with ', ln) + f3.close() + g3.close() + except Exception: + print('not working for ', lc) + except Exception: + print('not working for -----', lc) + pass From 13c41bf1370b3f2845dd12f2824a843c07669385 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 1 Feb 2023 21:03:32 -0800 Subject: [PATCH 079/130] add xarray and xcdat for provenance --- pcmdi_metrics/io/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pcmdi_metrics/io/base.py b/pcmdi_metrics/io/base.py index 099b87937..5965564c1 100755 --- a/pcmdi_metrics/io/base.py +++ b/pcmdi_metrics/io/base.py @@ -82,7 +82,7 @@ def update_dict(d, u): def generateProvenance(): - extra_pairs = {"matplotlib": "matplotlib ", "scipy": "scipy"} + extra_pairs = {"matplotlib": "matplotlib ", "scipy": "scipy", "xcdat": "xcdat", "xarray", "xarray"} prov = cdat_info.generateProvenance(extra_pairs=extra_pairs) prov["packages"]["PMP"] = 
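
# mk_CRF_clims.py above derives cloud radiative effect and net-flux climatologies from the
# radiation climatologies with cdms2/MV2:
#   rstcre = rsutcs - rsut,  rltcre = rlutcs - rlut,  rst = rsdt - rsut,  rt = rst - rlut.
# A hedged xarray sketch of the same arithmetic (the input file names are illustrative):
import xarray as xr

rsut = xr.open_dataset("rsut_AC.nc")["rsut"]        # all-sky SW up at TOA
rsutcs = xr.open_dataset("rsutcs_AC.nc")["rsutcs"]  # clear-sky SW up at TOA
rsdt = xr.open_dataset("rsdt_AC.nc")["rsdt"]        # SW down at TOA
rlut = xr.open_dataset("rlut_AC.nc")["rlut"]        # all-sky LW up at TOA

rstcre = (rsutcs - rsut).rename("rstcre")           # shortwave cloud radiative effect
rst = (rsdt - rsut).rename("rst")                   # net shortwave at TOA
rt = (rst - rlut).rename("rt")                      # net TOA radiation
for da in (rstcre, rst, rt):
    da.attrs["units"] = "W m-2"
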
pcmdi_metrics.version.__git_tag_describe__ prov["packages"][ From 71b928258c876ec56f6d01b6202651c4e086e4ae Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 1 Feb 2023 21:12:40 -0800 Subject: [PATCH 080/130] temporary solution for the setuptools issue (#893) --- conda-env/dev.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/conda-env/dev.yml b/conda-env/dev.yml index 9724506d2..b85a339a0 100644 --- a/conda-env/dev.yml +++ b/conda-env/dev.yml @@ -21,6 +21,7 @@ dependencies: - enso_metrics=1.1.1 - xcdat=0.4.0 - xmltodict=0.13.0 + - setuptools=65.5.0 # Testing # ================== - pre_commit=2.15.0 From 10e1b4f2427b902327f6ef410245240bca9e82c6 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 1 Feb 2023 21:13:45 -0800 Subject: [PATCH 081/130] typo fix --- pcmdi_metrics/io/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pcmdi_metrics/io/base.py b/pcmdi_metrics/io/base.py index 5965564c1..21b8eae70 100755 --- a/pcmdi_metrics/io/base.py +++ b/pcmdi_metrics/io/base.py @@ -82,7 +82,7 @@ def update_dict(d, u): def generateProvenance(): - extra_pairs = {"matplotlib": "matplotlib ", "scipy": "scipy", "xcdat": "xcdat", "xarray", "xarray"} + extra_pairs = {"matplotlib": "matplotlib ", "scipy": "scipy", "xcdat": "xcdat", "xarray": "xarray"} prov = cdat_info.generateProvenance(extra_pairs=extra_pairs) prov["packages"]["PMP"] = pcmdi_metrics.version.__git_tag_describe__ prov["packages"][ From 9c39fc42ad0f8f46b59f350204eb13e1f942bc18 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 1 Feb 2023 21:14:29 -0800 Subject: [PATCH 082/130] add flexibility for using longitude range to subset, either (-180,180) or (0, 360) --- pcmdi_metrics/io/default_regions_define.py | 41 ++++++++++++++++------ 1 file changed, 31 insertions(+), 10 deletions(-) diff --git a/pcmdi_metrics/io/default_regions_define.py b/pcmdi_metrics/io/default_regions_define.py index da8393fe0..70a6b4ded 100755 --- a/pcmdi_metrics/io/default_regions_define.py +++ b/pcmdi_metrics/io/default_regions_define.py @@ -1,3 +1,6 @@ +import xcdat as xc + + def load_regions_specs(): regions_specs = { @@ -61,7 +64,7 @@ def load_regions_specs(): return regions_specs -def region_subset(d, regions_specs, region=None): +def region_subset(ds, regions_specs, region=None): """ d: xarray.Dataset regions_specs: dict @@ -75,17 +78,35 @@ def region_subset(d, regions_specs, region=None): if 'latitude' in list(regions_specs[region]['domain'].keys()): lat0 = regions_specs[region]['domain']['latitude'][0] lat1 = regions_specs[region]['domain']['latitude'][1] - if 'latitude' in (d.coords.dims): - d = d.sel(latitude=slice(lat0, lat1)) - elif 'lat' in (d.coords.dims): - d = d.sel(lat=slice(lat0, lat1)) + # proceed subset + if 'latitude' in (ds.coords.dims): + ds = ds.sel(latitude=slice(lat0, lat1)) + elif 'lat' in (ds.coords.dims): + ds = ds.sel(lat=slice(lat0, lat1)) if 'longitude' in list(regions_specs[region]['domain'].keys()): lon0 = regions_specs[region]['domain']['longitude'][0] lon1 = regions_specs[region]['domain']['longitude'][1] - if 'longitude' in (d.coords.dims): - d = d.sel(longitude=slice(lon0, lon1)) - elif 'lon' in (d.coords.dims): - d = d.sel(lon=slice(lon0, lon1)) + + # check original dataset longitude range + if 'longitude' in (ds.coords.dims): + lon_min = ds.longitude.min() + lon_max = ds.longitude.max() + elif 'lon' in (ds.coords.dims): + lon_min = ds.lon.min() + lon_max = ds.lon.max() + + # longitude range swap if needed + if min(lon0, lon1) < 0: # when subset region lon is defined in (-180, 180) range + if min(lon_min, 
lon_max) < 0: # if original data lon range is (-180, 180) no treatment needed + pass + else: # if original data lon range is (0, 360), convert swap lon + ds = xc.swap_lon_axis(ds, to=(-180, 180)) + + # proceed subset + if 'longitude' in (ds.coords.dims): + ds = ds.sel(longitude=slice(lon0, lon1)) + elif 'lon' in (ds.coords.dims): + ds = ds.sel(lon=slice(lon0, lon1)) - return d + return ds From bafd6e030e49affd1c9f2ebbfcf64694035b7f7b Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 1 Feb 2023 21:15:19 -0800 Subject: [PATCH 083/130] bug fix --- .../lib/mean_climate_metrics_to_json.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py index 34fe4bf93..fe45b2cd4 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py @@ -1,10 +1,13 @@ from copy import deepcopy from pcmdi_metrics.io.base import Base +import json def mean_climate_metrics_to_json( - outdir, json_filename, result_dict, model=None, run=None, cmec_flag=False + outdir, json_filename, result_dict, + model=None, run=None, + cmec_flag=False, debug=False ): # Open JSON JSON = Base( @@ -17,10 +20,11 @@ def mean_climate_metrics_to_json( models_in_dict = list(json_dict["RESULTS"].keys()) for m in models_in_dict: if m == model: - runs_in_model_dict = list(json_dict["RESULTS"][m].keys()) - for r in runs_in_model_dict: - if r != run and run is not None: - del json_dict["RESULTS"][m][r] + for ref in list(json_dict["RESULTS"][m].keys()): + runs_in_model_dict = list(json_dict["RESULTS"][m][ref].keys()) + for r in runs_in_model_dict: + if (r != run) and (run is not None): + del json_dict["RESULTS"][m][ref][r] else: del json_dict["RESULTS"][m] # Write selected dict to JSON @@ -39,6 +43,11 @@ def mean_climate_metrics_to_json( mode="r+", sort_keys=True, ) + + if debug: + print('in mean_climate_metrics_to_json, model, run:', model, run) + print('json_dict:', json.dumps(json_dict, sort_keys=True, indent=4)) + if cmec_flag: print("Writing cmec file") JSON.write_cmec(indent=4, separators=(",", ": ")) From 82d884d0e174dc33669a36dbfee46e64b220c238 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 1 Feb 2023 21:16:08 -0800 Subject: [PATCH 084/130] clean up --- .../mean_climate/mean_climate_driver.py | 36 +++-- .../param/pcmdi_MIP_EXP_pmp_parameterfile.py | 129 +++++------------- .../pcmdi_compute_climatologies.py | 13 +- .../scripts/get_all_MIP_mods_from_CLIMS.py | 2 +- 4 files changed, 61 insertions(+), 119 deletions(-) diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py index 8e9002ea4..a8509caa4 100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver.py @@ -39,6 +39,7 @@ def main(): filename_template = parameter.filename_template sftlf_filename_template = parameter.sftlf_filename_template generate_sftlf = parameter.generate_sftlf + regions_specs = parameter.regions_specs regions = parameter.regions test_data_path = parameter.test_data_path reference_data_path = parameter.reference_data_path @@ -53,6 +54,10 @@ def main(): debug = True + if not bool(regions_specs): + regions_specs = load_regions_specs() + + default_regions = ['global', 'NHEX', 'SHEX', 'TROPICS'] print( 'case_id: ', case_id, '\n', 'test_data_set:', test_data_set, '\n', @@ -68,6 +73,7 @@ def main(): 'filename_template:', 
filename_template, '\n', 'sftlf_filename_template:', sftlf_filename_template, '\n', 'generate_sftlf:', generate_sftlf, '\n', + 'regions_specs:', regions_specs, '\n', 'regions:', regions, '\n', 'test_data_path:', test_data_path, '\n', 'reference_data_path:', reference_data_path, '\n', @@ -75,9 +81,6 @@ def main(): print('--- prepare mean climate metrics calculation ---') - regions_specs = load_regions_specs() - default_regions = ['global', 'NHEX', 'SHEX', 'TROPICS'] - # generate target grid if target_grid == "2.5x2.5": # target grid for regridding @@ -107,9 +110,6 @@ def main(): # if debug: # print('obs_dict:', json.dumps(obs_dict, indent=4, sort_keys=True)) - # set dictionary for .json record - result_dict = tree() - print('--- start mean climate metrics calculation ---') # ------------- @@ -130,6 +130,9 @@ def main(): print('varname:', varname) print('level:', level) + # set dictionary for .json record + result_dict = tree() + # ---------------- # observation loop # ---------------- @@ -149,15 +152,16 @@ def main(): # model loop # ---------- for model in test_data_set: - print('model:', model) for run in realization: + print('model, run:', model, run) ds_test_dict = dict() # identify data to load (annual cycle (AC) data is loading in) test_data_full_path = os.path.join( test_data_path, - filename_template.replace('%(variable)', varname).replace('%(model)', model).replace('%(realization)', run)) + filename_template).replace('%(variable)', varname).replace('%(model)', model).replace('%(realization)', run) # load data and regrid ds_test = load_and_regrid(test_data_full_path, varname, level, t_grid, regrid_tool=regrid_tool, debug=debug) + print('load and regrid done') # ----------- # region loop @@ -166,20 +170,19 @@ def main(): print('region:', region) # land/sea mask -- conduct masking only for variable data array, not entire data - if region.split('_')[0] in ['land', 'ocean']: - surface_type = region.split('_')[0] + if ('land' in region.split('_')) or ('ocean' in region.split('_')): ds_test_tmp = ds_test.copy(deep=True) ds_ref_tmp = ds_ref.copy(deep=True) - if surface_type == 'land': + if 'land' in region.split('_'): ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] != 0.) ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] != 0.) - elif surface_type == 'ocean': + elif 'ocean' in region.split('_'): ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] == 0.) ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] == 0.) 
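# A minimal sketch of the land/ocean masking rule applied just above, assuming
# `da` is any regridded DataArray on the same target grid that carries `sftlf`
# (names here are placeholders, not part of the patch):
#     land_da  = da.where(t_grid['sftlf'] != 0.)   # keep cells with any land fraction
#     ocean_da = da.where(t_grid['sftlf'] == 0.)   # keep ocean-only cells
# xarray's .where() retains values where the condition holds and fills everything
# else with NaN, which is how the land/ocean variants of the field are produced here.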
+ print('mask done') else: ds_test_tmp = ds_test ds_ref_tmp = ds_ref - print('mask done') # spatial subset if region.lower() in ['global', 'land', 'ocean']: @@ -191,11 +194,13 @@ def main(): ds_test_dict[region] = ds_test_tmp if region not in list(ds_ref_dict.keys()): ds_ref_dict[region] = region_subset(ds_ref_tmp, regions_specs, region=region) - print('spatial subset done') + + print('spatial subset done') if debug: print('ds_test_tmp:', ds_test_tmp) - ds_test_tmp.to_netcdf('_'.join([var, 'model', region + '.nc'])) + ds_test_dict[region].to_netcdf('_'.join([var, 'model', region + '.nc'])) + ds_ref_dict[region].to_netcdf('_'.join([var, 'ref', region + '.nc'])) # compute metrics print('compute metrics start') @@ -211,6 +216,7 @@ def main(): model=model, run=run, cmec_flag=cmec, + debug=debug ) # write collective JSON --- all models / all obs / single variable diff --git a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py index a4f3344eb..5e539b9a9 100755 --- a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py +++ b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py @@ -12,22 +12,21 @@ # ############################################################################### case_id = ver -# MIP = 'cmip6' # 'CMIP6' -MIP = 'cmip5' # 'CMIP6' +MIP = 'cmip6' # 'CMIP6' +# MIP = 'cmip5' # 'CMIP6' exp = 'historical' # exp = 'amip' # exp = 'picontrol' user_notes = "Provenance and results" -metrics_in_single_file = 'y' # 'y' or 'n' -regional = 'n' # 'n' +metrics_in_single_file = 'y' # 'y' or 'n' cmec = False # True # ################################################################ if MIP == 'cmip6': - modver = 'v20220928' + modver = 'v20230201' if MIP == 'cmip5': modver = 'v20220928' if exp == 'historical': @@ -35,13 +34,13 @@ # LIST OF MODEL VERSIONS TO BE TESTED - WHICH ARE EXPECTED TO BE PART OF CLIMATOLOGY FILENAME -# all_mods_dic= json.load(open('all_mip_mods-v20220927_2.json')) #all_mip_mods-v20200528.json')) -all_mods_dic = json.load(open('all_mip_mods-v20220928.json')) #all_mip_mods-v20200528.json')) +all_mods_dic = json.load(open('all_mip_mods-v20230201.json')) #all_mip_mods-v20200528.json')) # all_mods_dic = ['E3SM-1-0', 'ACCESS-CM2'] # test_data_set = all_mods_dic test_data_set = all_mods_dic[MIP][exp] test_data_set.sort() +test_data_set = ['ACCESS-CM2'] print(len(test_data_set), ' ', test_data_set) print('----------------------------------------------------------------') @@ -53,35 +52,6 @@ realm = 'Amon' # realm = 'Omon' -# #################### -# WITHOUT PARALLELIZATION - -''' -vars = ['pr', 'rltcre', 'rstcre', 'rt', 'rst', 'rlut', 'tauu', 'tauv'] -vars = ['ts', 'psl', 'tauu', 'tauv', 'tas', 'ta_850', 'ta_200', 'ua_850', 'ua_200', 'va_850', 'va_200', 'zg_500', 'pr', 'rltcre', 'rstcre', 'rt', 'rst', 'rlut'] -vars = ['tas', 'rlut', 'pr', 'ta_850', 'ta_200', 'ua_850', 'ua_200', 'va_850', 'va_200', 'zg_500'] -''' - -if regional == 'y': - vars = ['tas', 'ts', 'psl', 'sfcWind'] #, 'tauu', 'tauv'] ## THESE DO NOT WORK WITH PARALLELIZATON - # vars = ['tas'] - -# #################### -# WITH PARALLELIZATION - -''' -vars = [['psl', ], ['pr', ], ['prw', ], ['tas', ], ['uas', ], ['vas', ], ['sfcWind', ], ['tauu'], ['tauv']] -#vars = [['ta_850', ], ['ta_200', ], ['ua_850', ], ['ua_200', ], ['va_850', ], ['va_200', ], ['zg_500']] -vars = [['rlut', ], ['rsut', ], ['rsutcs', ], ['rlutcs', ], ['rsdt', ], ['rsus', ], ['rsds', ], ['rlds', ], ['rlus', ], ['rldscs', ], ['rsdscs']] -''' -# ALL BUT 
NOT tas ts psl sfcwind tauu tauv -if regional == 'n': - vars = [['pr', ], ['prw', ], ['uas', ], ['vas', ], ['ta_850', ], ['ta_200', ], ['ua_850', ], ['ua_200', ], ['va_850', ], ['va_200', ], ['zg_500'], ['rlut', ], ['rsut', ], ['rsutcs', ], ['rlutcs', ], ['rsdt', ], ['rsus', ], ['rsds', ], ['rlds', ], ['rlus', ], ['rldscs', ], ['rsdscs'], ['rltcre', ], ['rstcre', ], ['rt', ]] - -# vars = [['pr', ], ['rlut', ], ] -# vars = [['ts', ], ['psl', ]] - -# ################### vars = ['ts'] # MODEL SPECIFIC PARAMETERS @@ -91,39 +61,21 @@ "GFDL-ESM2G": {"variable_mapping": {"tos": "tos"}}, } - -# USER CUSTOMIZED REGIONS -if regional == regional: # 'y': - regions_specs = { - "Nino34": {"value": 0., "domain": cdutil.region.domain(latitude=(-5., 5.), longitude=(190., 240.))}, - 'ocean': {"value": 0., 'domain': cdutil.region.domain(latitude=(-90., 90))}, - 'land': {"value": 100., 'domain': cdutil.region.domain(latitude=(-90., 90))}, - 'ocean_50S50N': {"value": 0., 'domain': cdutil.region.domain(latitude=(-50., 50))}, - 'ocean_50S20S': {"value": 0., 'domain': cdutil.region.domain(latitude=(-50., -20))}, - 'ocean_20S20N': {"value": 0., 'domain': cdutil.region.domain(latitude=(-20., 20))}, - 'ocean_20N50N': {"value": 0., 'domain': cdutil.region.domain(latitude=(20., 50))}, - 'ocean_50N90N': {"value": 0., 'domain': cdutil.region.domain(latitude=(50., 90))}, - '90S50S': {"value": None, 'domain': cdutil.region.domain(latitude=(-90., -50))}, - '50S20S': {"value": None, 'domain': cdutil.region.domain(latitude=(-50., -20))}, - '20S20N': {"value": None, 'domain': cdutil.region.domain(latitude=(-20., 20))}, - '20N50N': {"value": None, 'domain': cdutil.region.domain(latitude=(20., 50))}, - '50N90N': {"value": None, 'domain': cdutil.region.domain(latitude=(50., 90))}, - 'NH': {"value": None, 'domain': cdutil.region.domain(latitude=(0., 90))}, - 'SH': {"value": None, 'domain': cdutil.region.domain(latitude=(-90., 0))}, - 'NHEX_ocean': {"value": 0., 'domain': cdutil.region.domain(latitude=(0., 90))}, - 'SHEX_ocean': {"value": 0., 'domain': cdutil.region.domain(latitude=(-90., 0))}, - 'NHEX_land': {"value": 100., 'domain': cdutil.region.domain(latitude=(20., 90))}, - 'SHEX_land': {"value": 100., 'domain': cdutil.region.domain(latitude=(-90., -20.))}} - # 'GLOBAL': {"value": 0., 'domain': cdutil.region.domain(latitude=(-90., 90.))}, - +# Region (if not given, default region applied: global, NHEX, SHEX, TROPICS) regions = { - "tas": [None, "land", "ocean", "ocean_50S50N", "NHEX_land", "SHEX_land"], - "tauu": [None, "ocean_50S50N"], - "tauv": [None, "ocean_50S50N"], - "psl": [None, "ocean", "ocean_50S50N", "NHEX_ocean", "SHEX_ocean"], - "sfcWind": [None, "ocean", "ocean_50S50N", "NHEX_ocean", "SHEX_ocean"], - "ts": [None, "ocean", "ocean_50S50N", "NHEX_ocean", "SHEX_ocean"], - "tos": [None]} + # "pr": ["global", "NHEX", "SHEX", "TROPICS", "land_NHEX", "ocean_SHEX"], + "pr": ["global"], + # "pr": ["land", "ocean", "land_TROPICS", "ocean_SHEX"], + "ua": ["global"], + "ta": ["global", "NHEX", "SHEX", "TROPICS", "land_NHEX", "ocean_SHEX"], + # "ta": ["NHEX"], + # "ta": ["land_NHEX"] + # "ta": ["global"] + # "ts": ["global", "NHEX", "SHEX", "TROPICS", "ocean", "CONUS"], + # "ts": ["global"], + "ts": ["global", "CONUS"], + # "ts": ["CONUS"], +} # USER CAN CUSTOMIZE REGIONS VALUES NAMES # regions_values = {"land": 100., "ocean": 0.} @@ -151,7 +103,7 @@ period = '1981-2005' # period = '1979-1989' -realization = 'r1i1p1' +realization = 'r1i1p1f1' # SAVE INTERPOLATED MODEL CLIMATOLOGIES ? 
save_test_clims = True # True or False @@ -161,42 +113,32 @@ # Templates for climatology files verd = '*' -if exp == 'amip': - filename_template = "%(variable)_%(model_version)_%(table)_amip_%(exp)r1i1p1_198101-200512-clim.nc" -if exp == 'amip': - filename_template = "CMIP5.amip.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20190225.nc" -# if exp == 'historical': - # filename_template = "CMIP5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20190307.nc" if exp == 'historical' and MIP == 'cmip5': - filename_template = MIP + '.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.' + modver + '.nc' + filename_template = MIP + '.historical.%(model).r1i1p1.mon.%(variable).198101-200512.AC.' + modver + '.nc' if exp == 'amip' and MIP == 'cmip5': - filename_template = MIP + '.amip.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.' + modver + '.nc' + filename_template = MIP + '.amip.%(model).r1i1p1.mon.%(variable).198101-200512.AC.' + modver + '.nc' if exp == 'historical' and MIP == 'cmip6': - filename_template = MIP + '.historical.%(model_version).r1i1p1f1.mon.%(variable).198101-200512.AC.' + modver + '.nc' + filename_template = MIP + '.historical.%(model).r1i1p1f1.mon.%(variable).198101-200512.AC.' + modver + '.nc' if exp == 'amip' and MIP == 'cmip6': - filename_template = MIP + '.amip.%(model_version).r1i1p1f1.mon.%(variable).198101-200512.AC.' + modver + '.nc' - -# if exp == 'historical': - # filename_template = "CMIP5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.%('*').nc" + filename_template = MIP + '.amip.%(model).r1i1p1f1.mon.%(variable).198101-200512.AC.' + modver + '.nc' if exp == 'picontrol': - filename_template = "%(variable)_%(model_version)_%(table)_picontrol_%(exp)r1i1p1_01-12-clim.nc" + filename_template = "%(variable)_%(model)_%(table)_picontrol_%(exp)_r1i1p1_01-12-clim.nc" # Templates for MODEL land/sea mask (sftlf) # filename template for landsea masks ('sftlf') -# sftlf_filename_template = "/work/gleckler1/processed_data/cmip5_fixed_fields/sftlf/sftlf_%(model_version).nc" +# sftlf_filename_template = "/work/gleckler1/processed_data/cmip5_fixed_fields/sftlf/sftlf_%(model).nc" generate_sftlf = True # ESTIMATE LAND SEA MASK IF NOT FOUND -sftlf_filename_template = "cmip6.historical.%(model_version).sftlf.nc" # "sftlf_%(model_version).nc" +sftlf_filename_template = "cmip6.historical.%(model).sftlf.nc" # "sftlf_%(model).nc" # ROOT PATH FOR MODELS CLIMATOLOGIES test_data_path = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/' + MIP + '/' + exp + '/' + modver + '/%(variable)/' # ROOT PATH FOR OBSERVATIONS -# reference_data_path = '/work/gleckler1/processed_data/obs/' reference_data_path = '/p/user_pub/PCMDIobs/obs4MIPs_clims/' -# custom_observations = os.path.abspath('/p/user_pub/PCMDIobs/catalogue/obs4MIPs_PCMDI_clims_byVar_catalogue_v20210805.json') -custom_observations = './obs4MIPs_PCMDI_clims_byVar_catalogue_v20210805_ljw.json' +custom_observations = os.path.abspath('/p/user_pub/PCMDIobs/catalogue/obs4MIPs_PCMDI_clims_byVar_catalogue_v20210816.json') +# custom_observations = './obs4MIPs_PCMDI_clims_byVar_catalogue_v20210805_ljw.json' print('CUSTOM OBS ARE ', custom_observations) if not os.path.exists(custom_observations): @@ -207,7 +149,7 @@ # BY INDIVIDUAL MODELS if metrics_in_single_file != 'y': metrics_output_path = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/metrics_results/mean_climate/' + MIP + '/' + exp + '/%(case_id)/%(variable)%(level)/' # INDIVIDUAL MOD FILES - 
output_json_template = '%(model_version).%(variable)%(level).' + MIP + '.' + exp + '.%(regrid_method).' + target_grid_string + '.' + case_id # INDIVIDUAL MOD FILES + output_json_template = '%(model).%(variable)%(level).' + MIP + '.' + exp + '.%(regrid_method).' + target_grid_string + '.' + case_id # INDIVIDUAL MOD FILES # ALL MODELS IN ONE FILE if metrics_in_single_file == 'y': metrics_output_path = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/metrics_results/mean_climate/' + MIP + '/' + exp + '/%(case_id)/' # All SAME FILE @@ -215,14 +157,7 @@ # ####################################### # DIRECTORY WHERE TO PUT INTERPOLATED MODELS' CLIMATOLOGIES -test_clims_interpolated_output = '/work/gleckler1/processed_data/metrics_package/pmp_diagnostics' + '/interpolated_model_clims_' + exp + '/' + case_id -test_clims_interpolated_output = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results' + '/interpolated_model_clims_' + exp + '/' + case_id - test_clims_interpolated_output = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results' + '/interpolated_model_clims/' + MIP + '/' + exp + '/' + case_id # FILENAME FOR INTERPOLATED CLIMATOLGIES OUTPUT -filename_output_template = MIP + ".%(model_version)." + exp + ".r1i1p1.mo.%(variable)%(level).%(period).interpolated.%(regrid_method).%(region).AC." + case_id + "%(ext)" - -if regional == 'n': - num_workers = 20 # 17 - granularize = ["vars"] +filename_output_template = MIP + ".%(model)." + exp + "." + realization + ".mo.%(variable)%(level).%(period).interpolated.%(regrid_method).%(region).AC." + case_id + "%(ext)" diff --git a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py index 6df4739bb..b1bdb2a6f 100755 --- a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py +++ b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py @@ -5,6 +5,7 @@ import dask from genutil import StringConstructor +import xcdat as xc from pcmdi_metrics.io import xcdat_open from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPMetricsParser @@ -13,17 +14,17 @@ def clim_calc(var, infile, outfile=None, outpath=None, outfilename=None, start=None, end=None): ver = datetime.datetime.now().strftime("v%Y%m%d") - print('time is ', ver) + print("ver:", ver) infilename = infile.split("/")[-1] - print("infilename is ", infilename) + print("infilename:", infilename) - # d = xcdat.open_dataset(infile, data_var=var) # use xcdat function directly + # open file d = xcdat_open(infile, data_var=var) # wrapper of xcdat open functions to enable using xml atts = d.attrs - print('type(d):', type(d)) - print('atts:', atts) + print("type(d):", type(d)) + print("atts:", atts) # CONTROL OF OUTPUT DIRECTORY AND FILE out = outfile @@ -33,7 +34,7 @@ def clim_calc(var, infile, outfile=None, outpath=None, outfilename=None, start=N outdir = outpath os.makedirs(outdir, exist_ok=True) - print("outdir is ", outdir) + print("outdir:", outdir) # CLIM PERIOD if (start is None) and (end is None): diff --git a/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py b/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py index 811e8864e..86bf73b2f 100755 --- a/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py +++ b/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py @@ -1,7 +1,7 @@ import glob import json -ver = 'v20230130' +ver = 'v20230201' pin = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/%(MIP)/%(EXP)/' + ver + '/ts/' From 
a32d4fe82194bde98138ed700f442a9bd10f5d9c Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 1 Feb 2023 21:22:44 -0800 Subject: [PATCH 085/130] clean up --- pcmdi_metrics/io/default_regions_define.py | 4 ++-- pcmdi_metrics/mean_climate/deprecated/lib/dataset.py | 5 +---- .../mean_climate/lib/mean_climate_metrics_to_json.py | 4 ++-- pcmdi_metrics/mean_climate/mean_climate_driver.py | 4 ++-- .../mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py | 4 +--- pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py | 1 - pcmdi_metrics/version.py | 4 ++-- setup.py | 1 - share/DefArgsCIA.json | 2 +- 9 files changed, 11 insertions(+), 18 deletions(-) diff --git a/pcmdi_metrics/io/default_regions_define.py b/pcmdi_metrics/io/default_regions_define.py index 70a6b4ded..4511e2c43 100755 --- a/pcmdi_metrics/io/default_regions_define.py +++ b/pcmdi_metrics/io/default_regions_define.py @@ -87,7 +87,7 @@ def region_subset(ds, regions_specs, region=None): if 'longitude' in list(regions_specs[region]['domain'].keys()): lon0 = regions_specs[region]['domain']['longitude'][0] lon1 = regions_specs[region]['domain']['longitude'][1] - + # check original dataset longitude range if 'longitude' in (ds.coords.dims): lon_min = ds.longitude.min() @@ -101,7 +101,7 @@ def region_subset(ds, regions_specs, region=None): if min(lon_min, lon_max) < 0: # if original data lon range is (-180, 180) no treatment needed pass else: # if original data lon range is (0, 360), convert swap lon - ds = xc.swap_lon_axis(ds, to=(-180, 180)) + ds = xc.swap_lon_axis(ds, to=(-180, 180)) # proceed subset if 'longitude' in (ds.coords.dims): diff --git a/pcmdi_metrics/mean_climate/deprecated/lib/dataset.py b/pcmdi_metrics/mean_climate/deprecated/lib/dataset.py index 0b259675f..42090a48f 100644 --- a/pcmdi_metrics/mean_climate/deprecated/lib/dataset.py +++ b/pcmdi_metrics/mean_climate/deprecated/lib/dataset.py @@ -89,8 +89,7 @@ def create_sftlf(parameter): sft.target_grid = None sft.realization = "r0i0p0" DataSet.apply_custom_keys(sft, parameter.custom_keys, "sftlf") - if 1: - #try: + try: print('jwlee-test_create_sftlf, chk1') sftlf[test] = {"raw": sft.get("sftlf")} print('jwlee-test_create_sftlf, chk1-2') @@ -98,13 +97,11 @@ def create_sftlf(parameter): print('jwlee-test_create_sftlf, chk1-3') sftlf[test]["md5"] = sft.hash() print('jwlee-test_create_sftlf, chk1-4') - """ except Exception: print('jwlee-test_create_sftlf, chk2') sftlf[test] = {"raw": None} sftlf[test]["filename"] = None sftlf[test]["md5"] = None - """ print('jwlee-test-target_grid-create') if parameter.target_grid == "2.5x2.5": t_grid_cdms2 = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py index fe45b2cd4..baafc8137 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py @@ -1,11 +1,11 @@ +import json from copy import deepcopy from pcmdi_metrics.io.base import Base -import json def mean_climate_metrics_to_json( - outdir, json_filename, result_dict, + outdir, json_filename, result_dict, model=None, run=None, cmec_flag=False, debug=False ): diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py index a8509caa4..c7b1c0a59 100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver.py @@ -56,7 +56,7 @@ def main(): if not 
bool(regions_specs): regions_specs = load_regions_specs() - + default_regions = ['global', 'NHEX', 'SHEX', 'TROPICS'] print( 'case_id: ', case_id, '\n', @@ -194,7 +194,7 @@ def main(): ds_test_dict[region] = ds_test_tmp if region not in list(ds_ref_dict.keys()): ds_ref_dict[region] = region_subset(ds_ref_tmp, regions_specs, region=region) - + print('spatial subset done') if debug: diff --git a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py index 5e539b9a9..41e8cc745 100755 --- a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py +++ b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py @@ -3,8 +3,6 @@ import os import sys -import cdutil - ver = datetime.datetime.now().strftime('v%Y%m%d') # ############################################################################### @@ -52,7 +50,7 @@ realm = 'Amon' # realm = 'Omon' -vars = ['ts'] +vars = ['ts', 'pr'] # MODEL SPECIFIC PARAMETERS model_tweaks = { diff --git a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py index b1bdb2a6f..edbff943e 100755 --- a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py +++ b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py @@ -5,7 +5,6 @@ import dask from genutil import StringConstructor -import xcdat as xc from pcmdi_metrics.io import xcdat_open from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPMetricsParser diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 192a9f611..886c470cf 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-79-g308ee27' -__git_sha1__ = '308ee276752e3ec1ad90b13dfc8c324210145b40' +__git_tag_describe__ = 'v2.3.1-91-g13c41bf' +__git_sha1__ = '13c41bf1370b3f2845dd12f2824a843c07669385' diff --git a/setup.py b/setup.py index 5d53b57a1..155df2524 100755 --- a/setup.py +++ b/setup.py @@ -103,7 +103,6 @@ "share/test_data_files.txt", "share/cmip_model_list.json", "share/default_regions.py", - #"share/default_regions_xcdat.py", "share/DefArgsCIA.json", ), ), diff --git a/share/DefArgsCIA.json b/share/DefArgsCIA.json index cd33a055d..8507f33ba 100644 --- a/share/DefArgsCIA.json +++ b/share/DefArgsCIA.json @@ -163,4 +163,4 @@ ], "help":"A list of variables to be processed" } -} \ No newline at end of file +} From 3150879d9dd8edf673fa2f9590cc85e2b20454e5 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 1 Feb 2023 21:33:26 -0800 Subject: [PATCH 086/130] add debug option to parser parameter --- .../mean_climate/lib/create_mean_climate_parser.py | 8 ++++++++ pcmdi_metrics/mean_climate/mean_climate_driver.py | 3 +-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py b/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py index ace10707f..bb0c585c8 100644 --- a/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py +++ b/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py @@ -211,6 +211,14 @@ def create_mean_climate_parser(): required=False, ) + parser.add_argument( + "--debug", + dest="debug", + help="Turn on debugging mode by printing more information to track progress", + default=False, + required=False, + ) + parser.add_argument( "--cmec", dest="cmec", diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py index c7b1c0a59..92ec3e881 
100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver.py @@ -45,6 +45,7 @@ def main(): reference_data_path = parameter.reference_data_path metrics_output_path = parameter.metrics_output_path.replace('%(case_id)', case_id) + debug = parameter.debug cmec = False # temporary if realization is None: @@ -52,8 +53,6 @@ def main(): elif isinstance(realization, str): realization = [realization] - debug = True - if not bool(regions_specs): regions_specs = load_regions_specs() From dd7c2d15f0f53afe800c88603fb5b870c09a0de4 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 1 Feb 2023 21:33:47 -0800 Subject: [PATCH 087/130] clean up --- .../mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py | 2 +- .../mean_climate/scripts/allvars_parallel_mod_clims.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py index 41e8cc745..36fe287e6 100755 --- a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py +++ b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py @@ -38,7 +38,7 @@ # test_data_set = all_mods_dic test_data_set = all_mods_dic[MIP][exp] test_data_set.sort() -test_data_set = ['ACCESS-CM2'] +#test_data_set = ['ACCESS-CM2'] print(len(test_data_set), ' ', test_data_set) print('----------------------------------------------------------------') diff --git a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py index 35998b66d..a09ad7d0e 100644 --- a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py +++ b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py @@ -7,7 +7,7 @@ exp = 'historical' # exp = 'amip' mip = 'cmip6' -verin = 'v20220924' #'v20210731' #'v20201226' +verin = 'v20230201' #'v20210731' #'v20201226' start = '1981-01' end = '2005-12' numw = 35 # None #35 @@ -17,7 +17,7 @@ # vars = ['ts', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'pr', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs'] # vars = ['ta', 'ua', 'va', 'zg', 'hur', 'hus'] # vars = ['ts', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'pr', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs', 'ta', 'ua', 'va', 'zg', 'hur', 'hus'] -vars = ['ts'] +vars = ['ts', 'pr'] lst1 = [] listlog = [] From 8bc2f7faecddfbc18e998d294ea5bbed5d239cde Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 1 Feb 2023 21:33:59 -0800 Subject: [PATCH 088/130] clean up --- .../mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py index 36fe287e6..b59a913a0 100755 --- a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py +++ b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py @@ -38,7 +38,7 @@ # test_data_set = all_mods_dic test_data_set = all_mods_dic[MIP][exp] test_data_set.sort() -#test_data_set = ['ACCESS-CM2'] +# test_data_set = ['ACCESS-CM2'] print(len(test_data_set), ' ', test_data_set) print('----------------------------------------------------------------') From 0233f815cbe8f4e5363bfdb6a3147130623119e2 Mon Sep 17 00:00:00 
2001 From: lee1043 Date: Thu, 2 Feb 2023 12:40:54 -0800 Subject: [PATCH 089/130] bug fix for models using 360 calendar --- .../mean_climate/pcmdi_compute_climatologies.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py index edbff943e..a673fb149 100755 --- a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py +++ b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py @@ -40,23 +40,29 @@ def clim_calc(var, infile, outfile=None, outpath=None, outfilename=None, start=N # DEFAULT CLIM - BASED ON ENTIRE TIME SERIES start_yr = int(d.time["time.year"][0]) start_mo = int(d.time["time.month"][0]) + start_da = int(d.time["time.day"][0]) end_yr = int(d.time["time.year"][-1]) end_mo = int(d.time["time.month"][-1]) + end_da = int(d.time["time.day"][-1]) else: # USER DEFINED PERIOD start_yr = int(start.split("-")[0]) start_mo = int(start.split("-")[1]) + start_da = 1 end_yr = int(end.split("-")[0]) end_mo = int(end.split("-")[1]) + end_da = int(d.time.dt.days_in_month.sel(time=(d.time.dt.year==end_yr))[end_mo-1]) - start_yr_str = str(start_yr) + start_yr_str = str(start_yr).zfill(4) start_mo_str = str(start_mo).zfill(2) - end_yr_str = str(end_yr) + start_da_str = str(start_da).zfill(2) + end_yr_str = str(end_yr).zfill(4) end_mo_str = str(end_mo).zfill(2) + end_da_str = str(end_da).zfill(2) # Subset given time period - d = d.sel(time=slice(start_yr_str + '-' + start_mo_str + '-01', - end_yr_str + '-' + end_mo_str + '-31')) + d = d.sel(time=slice(start_yr_str + '-' + start_mo_str + '-' + start_da_str, + end_yr_str + '-' + end_mo_str + '-' + end_da_str)) print("start_yr_str is ", start_yr_str) print("start_mo_str is ", start_mo_str) From b057806b845b24b252e2ebe4cde850404e8dfd09 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 2 Feb 2023 16:45:37 -0800 Subject: [PATCH 090/130] enable multiple ensemble members --- .../mean_climate/mean_climate_driver.py | 161 ++++++++++-------- .../param/pcmdi_MIP_EXP_pmp_parameterfile.py | 10 +- 2 files changed, 99 insertions(+), 72 deletions(-) diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py index 92ec3e881..09e104c1d 100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver.py @@ -8,6 +8,7 @@ import cdutil import numpy as np import xcdat +import glob from pcmdi_metrics import resources from pcmdi_metrics.io import load_regions_specs, region_subset @@ -32,7 +33,6 @@ def main(): reference_data_set = parameter.reference_data_set target_grid = parameter.target_grid regrid_tool = parameter.regrid_tool - regrid_method = parameter.regrid_method regrid_tool_ocn = parameter.regrid_tool_ocn save_test_clims = parameter.save_test_clims test_clims_interpolated_output = parameter.test_clims_interpolated_output @@ -49,9 +49,12 @@ def main(): cmec = False # temporary if realization is None: - realization = "" + realizations = [""] elif isinstance(realization, str): - realization = [realization] + if realization.lower() in ["all", "*"]: + find_all_realizations = True + else: + realizations = [realization] if not bool(regions_specs): regions_specs = load_regions_specs() @@ -65,7 +68,6 @@ def main(): 'reference_data_set:', reference_data_set, '\n', 'target_grid:', target_grid, '\n', 'regrid_tool:', regrid_tool, '\n', - 'regrid_method:', regrid_method, '\n', 'regrid_tool_ocn:', regrid_tool_ocn, '\n', 
'save_test_clims:', save_test_clims, '\n', 'test_clims_interpolated_output:', test_clims_interpolated_output, '\n', @@ -76,7 +78,8 @@ def main(): 'regions:', regions, '\n', 'test_data_path:', test_data_path, '\n', 'reference_data_path:', reference_data_path, '\n', - 'metrics_output_path:', metrics_output_path, '\n') + 'metrics_output_path:', metrics_output_path, '\n', + 'debug:', debug, '\n') print('--- prepare mean climate metrics calculation ---') @@ -151,81 +154,103 @@ def main(): # model loop # ---------- for model in test_data_set: - for run in realization: - print('model, run:', model, run) - ds_test_dict = dict() + + if find_all_realizations: + test_data_full_path = os.path.join( + test_data_path, + filename_template).replace('%(variable)', varname).replace('%(model)', model).replace('%(realization)', '*') + ncfiles = glob.glob(test_data_full_path) + realizations = [] + for ncfile in ncfiles: + realizations.append(ncfile.split('/')[-1].split('.')[3]) + print('=================================') + print('model, runs:', model, realizations) + + for run in realizations: # identify data to load (annual cycle (AC) data is loading in) test_data_full_path = os.path.join( test_data_path, filename_template).replace('%(variable)', varname).replace('%(model)', model).replace('%(realization)', run) - # load data and regrid - ds_test = load_and_regrid(test_data_full_path, varname, level, t_grid, regrid_tool=regrid_tool, debug=debug) - print('load and regrid done') - - # ----------- - # region loop - # ----------- - for region in regions[varname]: - print('region:', region) - - # land/sea mask -- conduct masking only for variable data array, not entire data - if ('land' in region.split('_')) or ('ocean' in region.split('_')): - ds_test_tmp = ds_test.copy(deep=True) - ds_ref_tmp = ds_ref.copy(deep=True) - if 'land' in region.split('_'): - ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] != 0.) - ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] != 0.) - elif 'ocean' in region.split('_'): - ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] == 0.) - ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] == 0.) 
- print('mask done') - else: - ds_test_tmp = ds_test - ds_ref_tmp = ds_ref - - # spatial subset - if region.lower() in ['global', 'land', 'ocean']: - ds_test_dict[region] = ds_test_tmp - if region not in list(ds_ref_dict.keys()): - ds_ref_dict[region] = ds_ref_tmp - else: - ds_test_tmp = region_subset(ds_test_tmp, regions_specs, region=region) - ds_test_dict[region] = ds_test_tmp - if region not in list(ds_ref_dict.keys()): - ds_ref_dict[region] = region_subset(ds_ref_tmp, regions_specs, region=region) - - print('spatial subset done') - - if debug: - print('ds_test_tmp:', ds_test_tmp) - ds_test_dict[region].to_netcdf('_'.join([var, 'model', region + '.nc'])) - ds_ref_dict[region].to_netcdf('_'.join([var, 'ref', region + '.nc'])) - - # compute metrics - print('compute metrics start') - result_dict["RESULTS"][model][ref][run][region] = compute_metrics(varname, ds_test_dict[region], ds_ref_dict[region]) - - # write individual JSON - # --- single model (multi realizations if exist) / single obs (need to accumulate later) / single variable - json_filename_tmp = "_".join([model, var, target_grid, regrid_tool, regrid_method, "metrics"]) - mean_climate_metrics_to_json( - os.path.join(metrics_output_path, var), - json_filename_tmp, - result_dict, - model=model, - run=run, - cmec_flag=cmec, - debug=debug - ) + if os.path.exists(test_data_full_path): + print('-----------------------') + print('model, run:', model, run) + try: + ds_test_dict = dict() + + # load data and regrid + ds_test = load_and_regrid(test_data_full_path, varname, level, t_grid, regrid_tool=regrid_tool, debug=debug) + print('load and regrid done') + + # ----------- + # region loop + # ----------- + for region in regions[varname]: + print('region:', region) + + # land/sea mask -- conduct masking only for variable data array, not entire data + if ('land' in region.split('_')) or ('ocean' in region.split('_')): + ds_test_tmp = ds_test.copy(deep=True) + ds_ref_tmp = ds_ref.copy(deep=True) + if 'land' in region.split('_'): + ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] != 0.) + ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] != 0.) + elif 'ocean' in region.split('_'): + ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] == 0.) + ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] == 0.) 
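# A minimal sketch of the per-member discovery this patch adds, assuming
# `path_template` is the fully substituted climatology path with the member id
# left as a wildcard; file basenames are assumed to follow the template used in
# the parameter file, e.g. "cmip6.historical.ACCESS-CM2.r1i1p1f1.mon.ts.198101-200512.AC.v20230202.nc":
#     import glob
#     ncfiles = glob.glob(path_template.replace('%(realization)', '*'))
#     runs = [f.split('/')[-1].split('.')[3] for f in ncfiles]  # 4th dot-field of the basename, e.g. "r1i1p1f1"
# Each discovered run id is then substituted back into the filename template so
# every available ensemble member is processed by the realization loop above.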
+ print('mask done') + else: + ds_test_tmp = ds_test + ds_ref_tmp = ds_ref + + # spatial subset + if region.lower() in ['global', 'land', 'ocean']: + ds_test_dict[region] = ds_test_tmp + if region not in list(ds_ref_dict.keys()): + ds_ref_dict[region] = ds_ref_tmp + else: + ds_test_tmp = region_subset(ds_test_tmp, regions_specs, region=region) + ds_test_dict[region] = ds_test_tmp + if region not in list(ds_ref_dict.keys()): + ds_ref_dict[region] = region_subset(ds_ref_tmp, regions_specs, region=region) + + print('spatial subset done') + + if debug: + print('ds_test_tmp:', ds_test_tmp) + ds_test_dict[region].to_netcdf('_'.join([var, 'model', region + '.nc'])) + ds_ref_dict[region].to_netcdf('_'.join([var, 'ref', region + '.nc'])) + + # compute metrics + print('compute metrics start') + result_dict["RESULTS"][model][ref][run][region] = compute_metrics(varname, ds_test_dict[region], ds_ref_dict[region]) + + # write individual JSON + # --- single simulation, obs (need to accumulate later) / single variable + json_filename_tmp = "_".join([model, var, target_grid, regrid_tool, "metrics", ref]) + mean_climate_metrics_to_json( + os.path.join(metrics_output_path, var), + json_filename_tmp, + result_dict, + model=model, + run=run, + cmec_flag=cmec, + debug=debug + ) + + except Exception as e: + print('error occured for ', model, run) + print(e) + """ # write collective JSON --- all models / all obs / single variable - json_filename = "_".join([var, target_grid, regrid_tool, regrid_method, "metrics"]) + json_filename = "_".join([var, target_grid, regrid_tool, "metrics"]) mean_climate_metrics_to_json( metrics_output_path, json_filename, result_dict, cmec_flag=cmec, ) + """ if __name__ == "__main__": diff --git a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py index b59a913a0..403449415 100755 --- a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py +++ b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py @@ -24,7 +24,7 @@ # ################################################################ if MIP == 'cmip6': - modver = 'v20230201' + modver = 'v20230202' if MIP == 'cmip5': modver = 'v20220928' if exp == 'historical': @@ -32,7 +32,7 @@ # LIST OF MODEL VERSIONS TO BE TESTED - WHICH ARE EXPECTED TO BE PART OF CLIMATOLOGY FILENAME -all_mods_dic = json.load(open('all_mip_mods-v20230201.json')) #all_mip_mods-v20200528.json')) +all_mods_dic = json.load(open('all_mip_mods-' + modver +'.json')) # all_mods_dic = ['E3SM-1-0', 'ACCESS-CM2'] # test_data_set = all_mods_dic @@ -99,9 +99,9 @@ # SIMULATION PARAMETERg period = '1981-2005' -# period = '1979-1989' -realization = 'r1i1p1f1' +# realization = 'r1i1p1f1' +realization = 'all' # SAVE INTERPOLATED MODEL CLIMATOLOGIES ? save_test_clims = True # True or False @@ -159,3 +159,5 @@ # FILENAME FOR INTERPOLATED CLIMATOLGIES OUTPUT filename_output_template = MIP + ".%(model)." + exp + "." + realization + ".mo.%(variable)%(level).%(period).interpolated.%(regrid_method).%(region).AC." 
+ case_id + "%(ext)" + +debug = False From 8430439fda7c99545613bf9dd7e5e24f5837c5d0 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 2 Feb 2023 16:46:55 -0800 Subject: [PATCH 091/130] to include more models, clean up --- .../scripts/allvars_parallel_mod_clims.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py index a09ad7d0e..f693522ae 100644 --- a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py +++ b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py @@ -7,10 +7,10 @@ exp = 'historical' # exp = 'amip' mip = 'cmip6' -verin = 'v20230201' #'v20210731' #'v20201226' +verin = 'v20230201' start = '1981-01' end = '2005-12' -numw = 35 # None #35 +numw = 35 # number of workers in parallel processing verout = datetime.datetime.now().strftime('v%Y%m%d') # vars = ['rlut', 'tas', 'pr'] @@ -25,7 +25,7 @@ for var in vars: pin = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/' + verin + '/' + mip + '/' + exp + '/atmos/mon/' + var + '/' - lst = sorted(glob.glob(pin + '*r1i1p1f1*.xml')) + lst = sorted(glob.glob(pin + '*r1i1p1*.xml')) pathout_base = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/' + mip + '/' + exp + '/' pathoutdir = os.path.join(pathout_base, verout, var) @@ -35,9 +35,9 @@ for li in lst: print(li.split('.')) - mod = li.split('.')[4] - rn = li.split('.')[5] - vv = li.split('.')[7] + mod = li.split('.')[4] # model + rn = li.split('.')[5] # realization + vv = li.split('.')[7] # variable outfilename = mip + '.' + exp + '.' + mod + '.r1i1p1f1.mon.' + var + '.nc' cmd0 = "pcmdi_compute_climatologies.py --start " + start + " --end " + end + " --infile " @@ -46,10 +46,10 @@ cmd = cmd0 + li + ' --outfile ' + pathout + ' --var ' + var lst1.append(cmd) - logf = mod + '.' + rn + '.' + vv + '.txt' + logf = mod + '.' + rn + '.' 
+ vv listlog.append(logf) print(logf) print('Number of jobs starting is ', str(len(lst1))) -parallel_submitter(lst1, log_dir='./logs', logfilename_list=listlog, num_workers=numw) +parallel_submitter(lst1, log_dir='./logs/' + verout, logfilename_list=listlog, num_workers=numw) print('done submitting') From 596ea906efcc7349d3828c295fd45d8809bcc680 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 2 Feb 2023 16:47:16 -0800 Subject: [PATCH 092/130] clean up --- .../mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py b/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py index 86bf73b2f..160b057f4 100755 --- a/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py +++ b/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py @@ -1,7 +1,7 @@ import glob import json -ver = 'v20230201' +ver = 'v20230202' pin = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/%(MIP)/%(EXP)/' + ver + '/ts/' From 2f2ca999bc4775d02e46ea38b78215ab575c7d26 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 2 Feb 2023 16:50:30 -0800 Subject: [PATCH 093/130] clean up -- pre-commit --- pcmdi_metrics/mean_climate/mean_climate_driver.py | 2 +- .../mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py | 2 +- pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py | 2 +- pcmdi_metrics/version.py | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py index 09e104c1d..89a122201 100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver.py @@ -1,5 +1,6 @@ #!/usr/bin/env python +import glob import json import os from re import split @@ -8,7 +9,6 @@ import cdutil import numpy as np import xcdat -import glob from pcmdi_metrics import resources from pcmdi_metrics.io import load_regions_specs, region_subset diff --git a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py index 403449415..6943c3065 100755 --- a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py +++ b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py @@ -32,7 +32,7 @@ # LIST OF MODEL VERSIONS TO BE TESTED - WHICH ARE EXPECTED TO BE PART OF CLIMATOLOGY FILENAME -all_mods_dic = json.load(open('all_mip_mods-' + modver +'.json')) +all_mods_dic = json.load(open('all_mip_mods-' + modver + '.json')) # all_mods_dic = ['E3SM-1-0', 'ACCESS-CM2'] # test_data_set = all_mods_dic diff --git a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py index a673fb149..e3100b10a 100755 --- a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py +++ b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py @@ -51,7 +51,7 @@ def clim_calc(var, infile, outfile=None, outpath=None, outfilename=None, start=N start_da = 1 end_yr = int(end.split("-")[0]) end_mo = int(end.split("-")[1]) - end_da = int(d.time.dt.days_in_month.sel(time=(d.time.dt.year==end_yr))[end_mo-1]) + end_da = int(d.time.dt.days_in_month.sel(time=(d.time.dt.year == end_yr))[end_mo - 1]) start_yr_str = str(start_yr).zfill(4) start_mo_str = str(start_mo).zfill(2) diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 886c470cf..f29b175c1 100644 --- 
a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,3 +1,3 @@ __version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-91-g13c41bf' -__git_sha1__ = '13c41bf1370b3f2845dd12f2824a843c07669385' +__git_tag_describe__ = 'v2.3.1-101-g0233f81' +__git_sha1__ = '0233f815cbe8f4e5363bfdb6a3147130623119e2' From 8b59ca08c42f7e9653c7d1850f89ce92c8b9e636 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 2 Feb 2023 17:02:22 -0800 Subject: [PATCH 094/130] flake8 --- setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.py b/setup.py index 8bbec17d7..78f45c10b 100755 --- a/setup.py +++ b/setup.py @@ -1,6 +1,5 @@ from __future__ import print_function -import glob import subprocess from setuptools import find_packages, setup From 750108c323d780d5105fff6c7faf581f9d3a0ffa Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 2 Feb 2023 22:14:31 -0800 Subject: [PATCH 095/130] flake8 fix --- pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py b/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py index 20448a84d..82de5d500 100755 --- a/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py +++ b/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py @@ -19,9 +19,9 @@ # MIP = 'cmip5' # 'CMIP5' if MIP == 'cmip6': - ver = 'v20220927' # v20210812' #'v20210806' #'v20200526' + ver = 'v20230202' if MIP == 'cmip5': - ver = 'v20200426' #'v20191016' #'v20190820' #'v20190307' + ver = 'v20200426' # NEED TO RUN SEPERATELY FOR LW AND SW (i.e., rsut and rlut) radvar = 'rsut' @@ -34,9 +34,9 @@ for lc in lst: try: - l = lc.replace(radvar + 'cs', radvar) + li = lc.replace(radvar + 'cs', radvar) - if os.path.isfile(l): + if os.path.isfile(li): if radvar == 'rsut': fixname = 'rstcre' @@ -45,7 +45,7 @@ os.makedirs(pi.replace(radvar + 'cs', fixname), exist_ok=True) - f = cdms.open(l) + f = cdms.open(li) d = f(radvar) fc = cdms.open(lc) att_keys = fc.attributes.keys() @@ -62,7 +62,7 @@ cre.units = "W m-2" - lo = l.replace(radvar, fixname) + lo = li.replace(radvar, fixname) g = cdms.open(lo, 'w+') for att in f.attributes.keys(): @@ -73,7 +73,7 @@ print('done with ', lo) if radvar == 'rsut': - l1 = lc.replace('rsutcs', 'rsdt') # [:-1] + l1 = lc.replace('rsutcs', 'rsdt') try: f1 = cdms.open(l1) From 9800eefa7766ee7f9ad8e34b4e99fbaa29e4f2ab Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 2 Feb 2023 22:30:26 -0800 Subject: [PATCH 096/130] attempt to resolve github action issue, The Poetry configuration is invalid --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 7c50f1a5a..330ee7daa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,4 +1,5 @@ [tool.black] +[tool.poetry.dev-dependencies] line-length = 88 target-version = ['py36'] include = '\.pyi?$' From 20dc212ddd8f0345d7e025027a7917ecc782bdff Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 2 Feb 2023 22:33:20 -0800 Subject: [PATCH 097/130] take the last change back --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 330ee7daa..7c50f1a5a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,4 @@ [tool.black] -[tool.poetry.dev-dependencies] line-length = 88 target-version = ['py36'] include = '\.pyi?$' From b94f35ded3bf68e61606352edb0b061258b2c10e Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 2 Feb 2023 22:36:25 -0800 Subject: [PATCH 098/130] update isort version to avoid Poetry runtime error during the github action build test. 
related info: https://github.com/PyCQA/isort/pull/2078 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b23069bb0..8e2571f20 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ repos: - id: black - repo: https://github.com/timothycrosley/isort - rev: 5.10.1 + rev: 5.12.0 hooks: - id: isort From b19f02ab64a22d6aa0445fc5bbe10b04aa9e1d52 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 2 Feb 2023 22:42:56 -0800 Subject: [PATCH 099/130] update versions for pre-commit checking tools --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8e2571f20..3b5cdf40c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,7 +4,7 @@ fail_fast: true repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.3.0 + rev: v4.4.0 hooks: - id: trailing-whitespace args: [--markdown-linebreak-ext=md] @@ -12,7 +12,7 @@ repos: - id: check-yaml - repo: https://github.com/psf/black - rev: 22.10.0 + rev: 23.1.0 hooks: - id: black @@ -24,7 +24,7 @@ repos: # Need to use flake8 GitHub mirror due to CentOS git issue with GitLab # https://github.com/pre-commit/pre-commit/issues/1206 - repo: https://github.com/pycqa/flake8 - rev: 5.0.4 + rev: 6.0.0 hooks: - id: flake8 args: ["--config=setup.cfg"] From a79ccceeded46237be4d40bbb286c672db52faec Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 2 Feb 2023 22:43:45 -0800 Subject: [PATCH 100/130] minor change to comply with the latest version of pre-commit tool black --- cmec/scripts/pmp_param_generator.py | 1 - 1 file changed, 1 deletion(-) diff --git a/cmec/scripts/pmp_param_generator.py b/cmec/scripts/pmp_param_generator.py index 1161d54b8..b7a435942 100644 --- a/cmec/scripts/pmp_param_generator.py +++ b/cmec/scripts/pmp_param_generator.py @@ -24,7 +24,6 @@ def check_for_opt(key, settings): if __name__ == "__main__": - config_json = sys.argv[1] out_file_name = sys.argv[2] pmp_config = sys.argv[3] From c8e85a1f16a2631c56e1df0aab6e5b6f0124af99 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 2 Feb 2023 22:53:00 -0800 Subject: [PATCH 101/130] increase timeout --- .github/workflows/build_workflow.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml index 6cd2f2dab..cf462dc1d 100644 --- a/.github/workflows/build_workflow.yml +++ b/.github/workflows/build_workflow.yml @@ -25,7 +25,7 @@ jobs: needs: check-jobs-to-skip if: ${{ needs.check-jobs-to-skip.outputs.should_skip != 'true'}} || ${{ github.event_name == 'push' }} runs-on: ubuntu-latest - timeout-minutes: 2 + timeout-minutes: 10 steps: - name: Checkout Code Repository uses: actions/checkout@v2 @@ -48,7 +48,7 @@ jobs: defaults: run: shell: bash -l {0} - timeout-minutes: 10 + timeout-minutes: 5 steps: - uses: actions/checkout@v2 From d38f5c56552f898b85852f0e07e69a79deeb2c11 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 2 Feb 2023 23:24:40 -0800 Subject: [PATCH 102/130] github action build error: https://github.blog/changelog/2022-09-22-github-actions-all-actions-will-begin-running-on-node16-instead-of-node12/ --- .github/workflows/build_workflow.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml index cf462dc1d..f5307491f 100644 --- a/.github/workflows/build_workflow.yml +++ 
b/.github/workflows/build_workflow.yml @@ -29,10 +29,13 @@ jobs: steps: - name: Checkout Code Repository uses: actions/checkout@v2 + with: + node-version: '16' - name: Set up Python 3.9 uses: actions/setup-python@v2 with: + node-version: '16' python-version: 3.9 # Run all pre-commit hooks on all the files. @@ -40,6 +43,8 @@ jobs: # since the action is run on a branch in detached head state - name: Install and Run Pre-commit uses: pre-commit/action@v2.0.3 + with: + node-version: '16' build: needs: check-jobs-to-skip @@ -51,6 +56,8 @@ jobs: timeout-minutes: 5 steps: - uses: actions/checkout@v2 + with: + node-version: '16' - name: Cache Conda uses: actions/cache@v2 @@ -58,6 +65,7 @@ jobs: # Increase this value to reset cache if conda/dev.yml has not changed in the workflow CACHE_NUMBER: 0 with: + node-version: '16' path: ~/conda_pkgs_dir key: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-publish From bade8616c7a8d7bd32247f0e29860e757f8f2700 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Fri, 3 Feb 2023 00:02:05 -0800 Subject: [PATCH 103/130] use Node.js 16 for github action pre-commit-hook and build processes --- .github/workflows/build_workflow.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml index f5307491f..fd7148155 100644 --- a/.github/workflows/build_workflow.yml +++ b/.github/workflows/build_workflow.yml @@ -27,17 +27,18 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 10 steps: + - uses: actions/setup-node@v3 + with: + node-version: '16' - name: Checkout Code Repository uses: actions/checkout@v2 with: node-version: '16' - - name: Set up Python 3.9 uses: actions/setup-python@v2 with: node-version: '16' python-version: 3.9 - # Run all pre-commit hooks on all the files. 
# Getting only staged files can be tricky in case a new PR is opened # since the action is run on a branch in detached head state @@ -53,12 +54,12 @@ jobs: defaults: run: shell: bash -l {0} - timeout-minutes: 5 + timeout-minutes: 10 steps: - - uses: actions/checkout@v2 + - uses: actions/setup-node@v3 with: node-version: '16' - + - uses: actions/checkout@v2 - name: Cache Conda uses: actions/cache@v2 env: From ad4d212e0077a7c998475a19b04316c352b03d4c Mon Sep 17 00:00:00 2001 From: lee1043 Date: Fri, 3 Feb 2023 00:24:06 -0800 Subject: [PATCH 104/130] clean up --- pcmdi_metrics/version.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/pcmdi_metrics/version.py b/pcmdi_metrics/version.py index 20ed699ff..1a04a5141 100644 --- a/pcmdi_metrics/version.py +++ b/pcmdi_metrics/version.py @@ -1,9 +1,3 @@ -<<<<<<< HEAD -__version__ = 'v2.3.1' -__git_tag_describe__ = 'v2.3.1-101-g0233f81' -__git_sha1__ = '0233f815cbe8f4e5363bfdb6a3147130623119e2' -======= __version__ = "v2.4.0" __git_tag_describe__ = "v2.4.0-34-gcb5f9165" __git_sha1__ = "cb5f9165a6f7dc17f8865d7bdfa8e0e54741eceb" ->>>>>>> main From eabb77fc680edc7c83d2f23ba0ea55b5cd1b8e6f Mon Sep 17 00:00:00 2001 From: lee1043 Date: Fri, 3 Feb 2023 01:16:33 -0800 Subject: [PATCH 105/130] clean up: simplify code structure --- .../mean_climate/mean_climate_driver.py | 463 +++++++++--------- 1 file changed, 229 insertions(+), 234 deletions(-) diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py index 89a122201..1df552c02 100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver.py @@ -21,237 +21,232 @@ from pcmdi_metrics.variability_mode.lib import tree -def main(): - parser = create_mean_climate_parser() - parameter = parser.get_parameter(cmd_default_vars=False, argparse_vals_only=False) - - # parameters - case_id = parameter.case_id - test_data_set = parameter.test_data_set - realization = parameter.realization - vars = parameter.vars - reference_data_set = parameter.reference_data_set - target_grid = parameter.target_grid - regrid_tool = parameter.regrid_tool - regrid_tool_ocn = parameter.regrid_tool_ocn - save_test_clims = parameter.save_test_clims - test_clims_interpolated_output = parameter.test_clims_interpolated_output - filename_template = parameter.filename_template - sftlf_filename_template = parameter.sftlf_filename_template - generate_sftlf = parameter.generate_sftlf - regions_specs = parameter.regions_specs - regions = parameter.regions - test_data_path = parameter.test_data_path - reference_data_path = parameter.reference_data_path - metrics_output_path = parameter.metrics_output_path.replace('%(case_id)', case_id) - - debug = parameter.debug - cmec = False # temporary - - if realization is None: - realizations = [""] - elif isinstance(realization, str): - if realization.lower() in ["all", "*"]: - find_all_realizations = True - else: - realizations = [realization] - - if not bool(regions_specs): - regions_specs = load_regions_specs() - - default_regions = ['global', 'NHEX', 'SHEX', 'TROPICS'] - print( - 'case_id: ', case_id, '\n', - 'test_data_set:', test_data_set, '\n', - 'realization:', realization, '\n', - 'vars:', vars, '\n', - 'reference_data_set:', reference_data_set, '\n', - 'target_grid:', target_grid, '\n', - 'regrid_tool:', regrid_tool, '\n', - 'regrid_tool_ocn:', regrid_tool_ocn, '\n', - 'save_test_clims:', save_test_clims, '\n', - 'test_clims_interpolated_output:', test_clims_interpolated_output, '\n', - 
'filename_template:', filename_template, '\n', - 'sftlf_filename_template:', sftlf_filename_template, '\n', - 'generate_sftlf:', generate_sftlf, '\n', - 'regions_specs:', regions_specs, '\n', - 'regions:', regions, '\n', - 'test_data_path:', test_data_path, '\n', - 'reference_data_path:', reference_data_path, '\n', - 'metrics_output_path:', metrics_output_path, '\n', - 'debug:', debug, '\n') - - print('--- prepare mean climate metrics calculation ---') - - # generate target grid - if target_grid == "2.5x2.5": - # target grid for regridding - t_grid = xcdat.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) - if debug: - print('type(t_grid):', type(t_grid)) # Expected type is 'xarray.core.dataset.Dataset' - print('t_grid:', t_grid) - # identical target grid in cdms2 to use generateLandSeaMask function that is yet to exist in xcdat - t_grid_cdms2 = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) - # generate land sea mask for the target grid - sft = cdutil.generateLandSeaMask(t_grid_cdms2) - if debug: - print('sft:', sft) - print('sft.getAxisList():', sft.getAxisList()) - # add sft to target grid dataset - t_grid['sftlf'] = (['lat', 'lon'], np.array(sft)) - if debug: - print('t_grid (after sftlf added):', t_grid) - t_grid.to_netcdf('target_grid.nc') - - # load obs catalogue json - egg_pth = resources.resource_path() - obs_file_name = "obs_info_dictionary.json" - obs_file_path = os.path.join(egg_pth, obs_file_name) - with open(obs_file_path) as fo: - obs_dict = json.loads(fo.read()) - # if debug: - # print('obs_dict:', json.dumps(obs_dict, indent=4, sort_keys=True)) - - print('--- start mean climate metrics calculation ---') - - # ------------- - # variable loop - # ------------- - for var in vars: - - if '_' in var or '-' in var: - varname = split('_|-', var)[0] - level = float(split('_|-', var)[1]) - else: - varname = var - level = None - - if varname not in list(regions.keys()): - regions[varname] = default_regions - - print('varname:', varname) - print('level:', level) - - # set dictionary for .json record - result_dict = tree() - - # ---------------- - # observation loop - # ---------------- - for ref in reference_data_set: - print('ref:', ref) - # identify data to load (annual cycle (AC) data is loading in) - ref_dataset_name = obs_dict[varname][ref] - ref_data_full_path = os.path.join( - reference_data_path, - obs_dict[varname][ref_dataset_name]["template"]) - print('ref_data_full_path:', ref_data_full_path) - # load data and regrid - ds_ref = load_and_regrid(ref_data_full_path, varname, level, t_grid, decode_times=False, regrid_tool=regrid_tool, debug=debug) - ds_ref_dict = dict() - - # ---------- - # model loop - # ---------- - for model in test_data_set: - - if find_all_realizations: - test_data_full_path = os.path.join( - test_data_path, - filename_template).replace('%(variable)', varname).replace('%(model)', model).replace('%(realization)', '*') - ncfiles = glob.glob(test_data_full_path) - realizations = [] - for ncfile in ncfiles: - realizations.append(ncfile.split('/')[-1].split('.')[3]) - print('=================================') - print('model, runs:', model, realizations) - - for run in realizations: - # identify data to load (annual cycle (AC) data is loading in) - test_data_full_path = os.path.join( - test_data_path, - filename_template).replace('%(variable)', varname).replace('%(model)', model).replace('%(realization)', run) - if os.path.exists(test_data_full_path): - print('-----------------------') - print('model, run:', model, run) - try: - ds_test_dict = 
dict() - - # load data and regrid - ds_test = load_and_regrid(test_data_full_path, varname, level, t_grid, regrid_tool=regrid_tool, debug=debug) - print('load and regrid done') - - # ----------- - # region loop - # ----------- - for region in regions[varname]: - print('region:', region) - - # land/sea mask -- conduct masking only for variable data array, not entire data - if ('land' in region.split('_')) or ('ocean' in region.split('_')): - ds_test_tmp = ds_test.copy(deep=True) - ds_ref_tmp = ds_ref.copy(deep=True) - if 'land' in region.split('_'): - ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] != 0.) - ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] != 0.) - elif 'ocean' in region.split('_'): - ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] == 0.) - ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] == 0.) - print('mask done') - else: - ds_test_tmp = ds_test - ds_ref_tmp = ds_ref - - # spatial subset - if region.lower() in ['global', 'land', 'ocean']: - ds_test_dict[region] = ds_test_tmp - if region not in list(ds_ref_dict.keys()): - ds_ref_dict[region] = ds_ref_tmp - else: - ds_test_tmp = region_subset(ds_test_tmp, regions_specs, region=region) - ds_test_dict[region] = ds_test_tmp - if region not in list(ds_ref_dict.keys()): - ds_ref_dict[region] = region_subset(ds_ref_tmp, regions_specs, region=region) - - print('spatial subset done') - - if debug: - print('ds_test_tmp:', ds_test_tmp) - ds_test_dict[region].to_netcdf('_'.join([var, 'model', region + '.nc'])) - ds_ref_dict[region].to_netcdf('_'.join([var, 'ref', region + '.nc'])) - - # compute metrics - print('compute metrics start') - result_dict["RESULTS"][model][ref][run][region] = compute_metrics(varname, ds_test_dict[region], ds_ref_dict[region]) - - # write individual JSON - # --- single simulation, obs (need to accumulate later) / single variable - json_filename_tmp = "_".join([model, var, target_grid, regrid_tool, "metrics", ref]) - mean_climate_metrics_to_json( - os.path.join(metrics_output_path, var), - json_filename_tmp, - result_dict, - model=model, - run=run, - cmec_flag=cmec, - debug=debug - ) - - except Exception as e: - print('error occured for ', model, run) - print(e) - - """ - # write collective JSON --- all models / all obs / single variable - json_filename = "_".join([var, target_grid, regrid_tool, "metrics"]) - mean_climate_metrics_to_json( - metrics_output_path, - json_filename, - result_dict, - cmec_flag=cmec, - ) - """ - - -if __name__ == "__main__": - main() +parser = create_mean_climate_parser() +parameter = parser.get_parameter(cmd_default_vars=False, argparse_vals_only=False) + +# parameters +case_id = parameter.case_id +test_data_set = parameter.test_data_set +realization = parameter.realization +vars = parameter.vars +reference_data_set = parameter.reference_data_set +target_grid = parameter.target_grid +regrid_tool = parameter.regrid_tool +regrid_tool_ocn = parameter.regrid_tool_ocn +save_test_clims = parameter.save_test_clims +test_clims_interpolated_output = parameter.test_clims_interpolated_output +filename_template = parameter.filename_template +sftlf_filename_template = parameter.sftlf_filename_template +generate_sftlf = parameter.generate_sftlf +regions_specs = parameter.regions_specs +regions = parameter.regions +test_data_path = parameter.test_data_path +reference_data_path = parameter.reference_data_path +metrics_output_path = parameter.metrics_output_path.replace('%(case_id)', case_id) + +debug = parameter.debug +cmec = False # temporary + +if 
realization is None: + realizations = [""] +elif isinstance(realization, str): + if realization.lower() in ["all", "*"]: + find_all_realizations = True + else: + realizations = [realization] + +if not bool(regions_specs): + regions_specs = load_regions_specs() + +default_regions = ['global', 'NHEX', 'SHEX', 'TROPICS'] +print( + 'case_id: ', case_id, '\n', + 'test_data_set:', test_data_set, '\n', + 'realization:', realization, '\n', + 'vars:', vars, '\n', + 'reference_data_set:', reference_data_set, '\n', + 'target_grid:', target_grid, '\n', + 'regrid_tool:', regrid_tool, '\n', + 'regrid_tool_ocn:', regrid_tool_ocn, '\n', + 'save_test_clims:', save_test_clims, '\n', + 'test_clims_interpolated_output:', test_clims_interpolated_output, '\n', + 'filename_template:', filename_template, '\n', + 'sftlf_filename_template:', sftlf_filename_template, '\n', + 'generate_sftlf:', generate_sftlf, '\n', + 'regions_specs:', regions_specs, '\n', + 'regions:', regions, '\n', + 'test_data_path:', test_data_path, '\n', + 'reference_data_path:', reference_data_path, '\n', + 'metrics_output_path:', metrics_output_path, '\n', + 'debug:', debug, '\n') + +print('--- prepare mean climate metrics calculation ---') + +# generate target grid +if target_grid == "2.5x2.5": + # target grid for regridding + t_grid = xcdat.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) + if debug: + print('type(t_grid):', type(t_grid)) # Expected type is 'xarray.core.dataset.Dataset' + print('t_grid:', t_grid) + # identical target grid in cdms2 to use generateLandSeaMask function that is yet to exist in xcdat + t_grid_cdms2 = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) + # generate land sea mask for the target grid + sft = cdutil.generateLandSeaMask(t_grid_cdms2) + if debug: + print('sft:', sft) + print('sft.getAxisList():', sft.getAxisList()) + # add sft to target grid dataset + t_grid['sftlf'] = (['lat', 'lon'], np.array(sft)) + if debug: + print('t_grid (after sftlf added):', t_grid) + t_grid.to_netcdf('target_grid.nc') + +# load obs catalogue json +egg_pth = resources.resource_path() +obs_file_name = "obs_info_dictionary.json" +obs_file_path = os.path.join(egg_pth, obs_file_name) +with open(obs_file_path) as fo: + obs_dict = json.loads(fo.read()) +# if debug: + # print('obs_dict:', json.dumps(obs_dict, indent=4, sort_keys=True)) + +print('--- start mean climate metrics calculation ---') + +# ------------- +# variable loop +# ------------- +for var in vars: + + if '_' in var or '-' in var: + varname = split('_|-', var)[0] + level = float(split('_|-', var)[1]) + else: + varname = var + level = None + + if varname not in list(regions.keys()): + regions[varname] = default_regions + + print('varname:', varname) + print('level:', level) + + # set dictionary for .json record + result_dict = tree() + + # ---------------- + # observation loop + # ---------------- + for ref in reference_data_set: + print('ref:', ref) + # identify data to load (annual cycle (AC) data is loading in) + ref_dataset_name = obs_dict[varname][ref] + ref_data_full_path = os.path.join( + reference_data_path, + obs_dict[varname][ref_dataset_name]["template"]) + print('ref_data_full_path:', ref_data_full_path) + # load data and regrid + ds_ref = load_and_regrid(ref_data_full_path, varname, level, t_grid, decode_times=False, regrid_tool=regrid_tool, debug=debug) + ds_ref_dict = dict() + + # ---------- + # model loop + # ---------- + for model in test_data_set: + + if find_all_realizations: + test_data_full_path = os.path.join( + test_data_path, + 
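# How the %(...) placeholders above are filled, shown with hypothetical values
# (the real template and paths come from the parameter file):
import os

filename_template = "cmip6.historical.%(model).%(realization).mon.%(variable).198101-200512.AC.nc"
test_data_path = "/path/to/model/clims"
example_path = (
    os.path.join(test_data_path, filename_template)
    .replace("%(variable)", "pr")
    .replace("%(model)", "ACCESS-CM2")
    .replace("%(realization)", "r1i1p1f1")
)
# example_path == "/path/to/model/clims/cmip6.historical.ACCESS-CM2.r1i1p1f1.mon.pr.198101-200512.AC.nc"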
filename_template).replace('%(variable)', varname).replace('%(model)', model).replace('%(realization)', '*') + ncfiles = glob.glob(test_data_full_path) + realizations = [] + for ncfile in ncfiles: + realizations.append(ncfile.split('/')[-1].split('.')[3]) + print('=================================') + print('model, runs:', model, realizations) + + for run in realizations: + # identify data to load (annual cycle (AC) data is loading in) + test_data_full_path = os.path.join( + test_data_path, + filename_template).replace('%(variable)', varname).replace('%(model)', model).replace('%(realization)', run) + if os.path.exists(test_data_full_path): + print('-----------------------') + print('model, run:', model, run) + try: + ds_test_dict = dict() + + # load data and regrid + ds_test = load_and_regrid(test_data_full_path, varname, level, t_grid, regrid_tool=regrid_tool, debug=debug) + print('load and regrid done') + + # ----------- + # region loop + # ----------- + for region in regions[varname]: + print('region:', region) + + # land/sea mask -- conduct masking only for variable data array, not entire data + if ('land' in region.split('_')) or ('ocean' in region.split('_')): + ds_test_tmp = ds_test.copy(deep=True) + ds_ref_tmp = ds_ref.copy(deep=True) + if 'land' in region.split('_'): + ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] != 0.) + ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] != 0.) + elif 'ocean' in region.split('_'): + ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] == 0.) + ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] == 0.) + print('mask done') + else: + ds_test_tmp = ds_test + ds_ref_tmp = ds_ref + + # spatial subset + if region.lower() in ['global', 'land', 'ocean']: + ds_test_dict[region] = ds_test_tmp + if region not in list(ds_ref_dict.keys()): + ds_ref_dict[region] = ds_ref_tmp + else: + ds_test_tmp = region_subset(ds_test_tmp, regions_specs, region=region) + ds_test_dict[region] = ds_test_tmp + if region not in list(ds_ref_dict.keys()): + ds_ref_dict[region] = region_subset(ds_ref_tmp, regions_specs, region=region) + + print('spatial subset done') + + if debug: + print('ds_test_tmp:', ds_test_tmp) + ds_test_dict[region].to_netcdf('_'.join([var, 'model', region + '.nc'])) + ds_ref_dict[region].to_netcdf('_'.join([var, 'ref', region + '.nc'])) + + # compute metrics + print('compute metrics start') + result_dict["RESULTS"][model][ref][run][region] = compute_metrics(varname, ds_test_dict[region], ds_ref_dict[region]) + + # write individual JSON + # --- single simulation, obs (need to accumulate later) / single variable + json_filename_tmp = "_".join([model, var, target_grid, regrid_tool, "metrics", ref]) + mean_climate_metrics_to_json( + os.path.join(metrics_output_path, var), + json_filename_tmp, + result_dict, + model=model, + run=run, + cmec_flag=cmec, + debug=debug + ) + + except Exception as e: + print('error occured for ', model, run) + print(e) + + """ + # write collective JSON --- all models / all obs / single variable + json_filename = "_".join([var, target_grid, regrid_tool, "metrics"]) + mean_climate_metrics_to_json( + metrics_output_path, + json_filename, + result_dict, + cmec_flag=cmec, + ) + """ From 4380c56620f69da0115242ce3ab3b1abf0f6ddd9 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Fri, 3 Feb 2023 01:18:32 -0800 Subject: [PATCH 106/130] clean up --- pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py b/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py index 82de5d500..078b60aa0 100755 --- a/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py +++ b/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py @@ -24,13 +24,13 @@ ver = 'v20200426' # NEED TO RUN SEPERATELY FOR LW AND SW (i.e., rsut and rlut) -radvar = 'rsut' -# radvar = 'rlut' +# radvar = 'rsut' +radvar = 'rlut' pit = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/' + MIP + '/' + exp + '/' + ver + '/' pi = pit + radvar + 'cs/' -lst = glob.glob(pi + '*' + radvar + 'cs' '*.nc') +lst = sorted(glob.glob(pi + '*' + radvar + 'cs' '*.nc')) for lc in lst: try: From b3926b151d80b9c89e21ae9d3012edf349bfa93c Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Sun, 5 Feb 2023 18:56:09 -0800 Subject: [PATCH 107/130] pre-commit tool comply --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 8229d57a4..a378e8ecb 100644 --- a/setup.py +++ b/setup.py @@ -1,10 +1,10 @@ from __future__ import print_function import subprocess +import sys from setuptools import find_packages, setup - if "--enable-devel" in sys.argv: install_dev = True sys.argv.remove("--enable-devel") From 0e756773e0c17f2ca3d424253a47868bfda4f339 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Mon, 6 Feb 2023 09:32:12 -0800 Subject: [PATCH 108/130] simplify content in driver file for annual cycle: move calculation function to lib --- .../mean_climate/lib/calculate_climatology.py | 106 +++++++++ .../pcmdi_compute_climatologies.py | 225 +++++------------- 2 files changed, 170 insertions(+), 161 deletions(-) create mode 100644 pcmdi_metrics/mean_climate/lib/calculate_climatology.py diff --git a/pcmdi_metrics/mean_climate/lib/calculate_climatology.py b/pcmdi_metrics/mean_climate/lib/calculate_climatology.py new file mode 100644 index 000000000..28f05920d --- /dev/null +++ b/pcmdi_metrics/mean_climate/lib/calculate_climatology.py @@ -0,0 +1,106 @@ +import datetime +import os + +import dask +from genutil import StringConstructor + +from pcmdi_metrics.io import xcdat_open + + +def calculate_climatology( + var, infile, + outfile=None, outpath=None, outfilename=None, + start=None, end=None, ver=None): + + if ver is None: + ver=datetime.datetime.now().strftime("v%Y%m%d") + + print("ver:", ver) + + infilename = infile.split("/")[-1] + print("infilename:", infilename) + + # open file + d = xcdat_open(infile, data_var=var) # wrapper of xcdat open functions to enable using xml + atts = d.attrs + + print("type(d):", type(d)) + print("atts:", atts) + + # CONTROL OF OUTPUT DIRECTORY AND FILE + out = outfile + if outpath is None: + outdir = os.path.dirname(outfile) + else: + outdir = outpath + os.makedirs(outdir, exist_ok=True) + + print("outdir:", outdir) + + # CLIM PERIOD + if (start is None) and (end is None): + # DEFAULT CLIM - BASED ON ENTIRE TIME SERIES + start_yr = int(d.time["time.year"][0]) + start_mo = int(d.time["time.month"][0]) + start_da = int(d.time["time.day"][0]) + end_yr = int(d.time["time.year"][-1]) + end_mo = int(d.time["time.month"][-1]) + end_da = int(d.time["time.day"][-1]) + else: + # USER DEFINED PERIOD + start_yr = int(start.split("-")[0]) + start_mo = int(start.split("-")[1]) + start_da = 1 + end_yr = int(end.split("-")[0]) + end_mo = int(end.split("-")[1]) + end_da = int(d.time.dt.days_in_month.sel(time=(d.time.dt.year == end_yr))[end_mo - 1]) + + start_yr_str = str(start_yr).zfill(4) + start_mo_str = str(start_mo).zfill(2) + start_da_str = 
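# The zero padding below produces fixed-width date labels (e.g. 1 -> "01",
# 1981 -> "1981"), so the strings handed to the xarray time slice a few lines
# down are well-formed "YYYY-MM-DD" values; a quick illustration:
start_yr, start_mo, start_da = 1981, 1, 1
label = str(start_yr).zfill(4) + "-" + str(start_mo).zfill(2) + "-" + str(start_da).zfill(2)
# label == "1981-01-01", later used as d.sel(time=slice("1981-01-01", "2005-12-31"))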
str(start_da).zfill(2) + end_yr_str = str(end_yr).zfill(4) + end_mo_str = str(end_mo).zfill(2) + end_da_str = str(end_da).zfill(2) + + # Subset given time period + d = d.sel(time=slice(start_yr_str + '-' + start_mo_str + '-' + start_da_str, + end_yr_str + '-' + end_mo_str + '-' + end_da_str)) + + print("start_yr_str is ", start_yr_str) + print("start_mo_str is ", start_mo_str) + print("end_yr_str is ", end_yr_str) + print("end_mo_str is ", end_mo_str) + + # Calculate climatology + dask.config.set(**{'array.slicing.split_large_chunks': True}) + d_clim = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},) + d_ac = d.temporal.climatology(var, freq="month", weighted=True) + + d_clim_dict = dict() + + d_clim_dict['DJF'] = d_clim.isel(time=0) + d_clim_dict['MAM'] = d_clim.isel(time=1) + d_clim_dict['JJA'] = d_clim.isel(time=2) + d_clim_dict['SON'] = d_clim.isel(time=3) + d_clim_dict['AC'] = d_ac + + for s in ["AC", "DJF", "MAM", "JJA", "SON"]: + addf = ( + "." + + start_yr_str + + start_mo_str + + "-" + + end_yr_str + + end_mo_str + + "." + + s + + "." + + ver + + ".nc" + ) + if outfilename is not None: + out = os.path.join(outdir, outfilename) + out_season = out.replace(".nc", addf) + + print("output file is", out_season) + d_clim_dict[s].to_netcdf(out_season) # global attributes are automatically saved as well diff --git a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py index e3100b10a..62a8c5307 100755 --- a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py +++ b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py @@ -1,168 +1,71 @@ #!/usr/bin/env python import datetime -import os -import dask from genutil import StringConstructor -from pcmdi_metrics.io import xcdat_open from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPMetricsParser - - -def clim_calc(var, infile, outfile=None, outpath=None, outfilename=None, start=None, end=None): - - ver = datetime.datetime.now().strftime("v%Y%m%d") - print("ver:", ver) - - infilename = infile.split("/")[-1] - print("infilename:", infilename) - - # open file - d = xcdat_open(infile, data_var=var) # wrapper of xcdat open functions to enable using xml - atts = d.attrs - - print("type(d):", type(d)) - print("atts:", atts) - - # CONTROL OF OUTPUT DIRECTORY AND FILE - out = outfile - if outpath is None: - outdir = os.path.dirname(outfile) - else: - outdir = outpath - os.makedirs(outdir, exist_ok=True) - - print("outdir:", outdir) - - # CLIM PERIOD - if (start is None) and (end is None): - # DEFAULT CLIM - BASED ON ENTIRE TIME SERIES - start_yr = int(d.time["time.year"][0]) - start_mo = int(d.time["time.month"][0]) - start_da = int(d.time["time.day"][0]) - end_yr = int(d.time["time.year"][-1]) - end_mo = int(d.time["time.month"][-1]) - end_da = int(d.time["time.day"][-1]) - else: - # USER DEFINED PERIOD - start_yr = int(start.split("-")[0]) - start_mo = int(start.split("-")[1]) - start_da = 1 - end_yr = int(end.split("-")[0]) - end_mo = int(end.split("-")[1]) - end_da = int(d.time.dt.days_in_month.sel(time=(d.time.dt.year == end_yr))[end_mo - 1]) - - start_yr_str = str(start_yr).zfill(4) - start_mo_str = str(start_mo).zfill(2) - start_da_str = str(start_da).zfill(2) - end_yr_str = str(end_yr).zfill(4) - end_mo_str = str(end_mo).zfill(2) - end_da_str = str(end_da).zfill(2) - - # Subset given time period - d = d.sel(time=slice(start_yr_str + '-' + start_mo_str + '-' + start_da_str, - end_yr_str + '-' + 
end_mo_str + '-' + end_da_str)) - - print("start_yr_str is ", start_yr_str) - print("start_mo_str is ", start_mo_str) - print("end_yr_str is ", end_yr_str) - print("end_mo_str is ", end_mo_str) - - # Calculate climatology - dask.config.set(**{'array.slicing.split_large_chunks': True}) - d_clim = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},) - d_ac = d.temporal.climatology(var, freq="month", weighted=True) - - d_clim_dict = dict() - - d_clim_dict['DJF'] = d_clim.isel(time=0) - d_clim_dict['MAM'] = d_clim.isel(time=1) - d_clim_dict['JJA'] = d_clim.isel(time=2) - d_clim_dict['SON'] = d_clim.isel(time=3) - d_clim_dict['AC'] = d_ac - - for s in ["AC", "DJF", "MAM", "JJA", "SON"]: - addf = ( - "." - + start_yr_str - + start_mo_str - + "-" - + end_yr_str - + end_mo_str - + "." - + s - + "." - + ver - + ".nc" - ) - if outfilename is not None: - out = os.path.join(outdir, outfilename) - out_season = out.replace(".nc", addf) - - print("output file is", out_season) - d_clim_dict[s].to_netcdf(out_season) # global attributes are automatically saved as well - - -if __name__ == "__main__": - - ver = datetime.datetime.now().strftime("v%Y%m%d") - - P = PMPMetricsParser() - - P.add_argument( - "--vars", dest="vars", help="List of variables", nargs="+", required=False - ) - P.add_argument("--infile", dest="infile", help="Defines infile", required=False) - P.add_argument( - "--outfile", dest="outfile", help="Defines output path and filename", required=False - ) - P.add_argument("--outpath", dest="outpath", help="Defines outpath only", required=False) - P.add_argument( - "--outfilename", - dest="outfilename", - help="Defines out filename only", - required=False, - ) - P.add_argument( - "--start", dest="start", help="Defines start year and month", required=False - ) - P.add_argument("--end", dest="end", help="Defines end year and month", required=False) - - args = P.get_parameter() - - infile_template = args.infile - outfile_template = args.outfile - outpath_template = args.outpath - outfilename_template = args.outfilename - varlist = args.vars - start = args.start - end = args.end - - print("start and end are ", start, " ", end) - print("variable list: ", varlist) - - InFile = StringConstructor(infile_template) - OutFile = StringConstructor(outfile_template) - OutFileName = StringConstructor(outfilename_template) - OutPath = StringConstructor(outpath_template) - - for var in varlist: - # Build filenames - InFile.variable = var - OutFile.variable = var - OutFileName.variable = var - OutPath.variable = var - infile = InFile() - outfile = OutFile() - outfilename = OutFileName() - outpath = OutPath() - - print('var:', var) - print('infile:', infile) - print('outfile:', outfile) - print('outfilename:', outfilename) - print('outpath:', outpath) - - # calculate climatologies for this variable - clim_calc(var, infile, outfile, outpath, outfilename, start, end) +from pcmdi_metrics.mean_climate.lib import calculate_climatology + + +ver = datetime.datetime.now().strftime("v%Y%m%d") + +P = PMPMetricsParser() + +P.add_argument( + "--vars", dest="vars", help="List of variables", nargs="+", required=False +) +P.add_argument("--infile", dest="infile", help="Defines infile", required=False) +P.add_argument( + "--outfile", dest="outfile", help="Defines output path and filename", required=False +) +P.add_argument("--outpath", dest="outpath", help="Defines outpath only", required=False) +P.add_argument( + "--outfilename", + dest="outfilename", + 
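# genutil.StringConstructor (used just below to build per-variable file names)
# fills "%(key)" tokens from attributes set on the object; a small sketch with
# a hypothetical template:
from genutil import StringConstructor

InFile = StringConstructor("cmip6.historical.%(model).mon.%(variable).xml")
InFile.model = "ACCESS-CM2"
InFile.variable = "pr"
infile = InFile()
# infile == "cmip6.historical.ACCESS-CM2.mon.pr.xml"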
help="Defines out filename only", + required=False, +) +P.add_argument( + "--start", dest="start", help="Defines start year and month", required=False +) +P.add_argument("--end", dest="end", help="Defines end year and month", required=False) + +args = P.get_parameter() + +infile_template = args.infile +outfile_template = args.outfile +outpath_template = args.outpath +outfilename_template = args.outfilename +varlist = args.vars +start = args.start +end = args.end + +print("start and end are ", start, " ", end) +print("variable list: ", varlist) +print("ver:", ver) + +InFile = StringConstructor(infile_template) +OutFile = StringConstructor(outfile_template) +OutFileName = StringConstructor(outfilename_template) +OutPath = StringConstructor(outpath_template) + +for var in varlist: + # Build filenames + InFile.variable = var + OutFile.variable = var + OutFileName.variable = var + OutPath.variable = var + infile = InFile() + outfile = OutFile() + outfilename = OutFileName() + outpath = OutPath() + + print('var:', var) + print('infile:', infile) + print('outfile:', outfile) + print('outfilename:', outfilename) + print('outpath:', outpath) + + # calculate climatologies for this variable + calculate_climatology(var, infile, outfile, outpath, outfilename, start, end, ver) From 3fdbdca3064c1f39517386527659b258a7c498d4 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Mon, 6 Feb 2023 09:33:02 -0800 Subject: [PATCH 109/130] simplify content in driver file for annual cycle: move calculation function to lib --- pcmdi_metrics/mean_climate/lib/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pcmdi_metrics/mean_climate/lib/__init__.py b/pcmdi_metrics/mean_climate/lib/__init__.py index e9f0a8713..d62544d27 100644 --- a/pcmdi_metrics/mean_climate/lib/__init__.py +++ b/pcmdi_metrics/mean_climate/lib/__init__.py @@ -17,3 +17,4 @@ from .create_mean_climate_parser import create_mean_climate_parser # noqa from .load_and_regrid import load_and_regrid # noqa from .mean_climate_metrics_to_json import mean_climate_metrics_to_json # noqa +from .calculate_climatology import calculate_climatology # noqa From 03fc30e0e087548b89a9b1d3ee938dc561ed6e1c Mon Sep 17 00:00:00 2001 From: lee1043 Date: Mon, 6 Feb 2023 09:33:30 -0800 Subject: [PATCH 110/130] test run --- .../mean_climate/scripts/allvars_parallel_mod_clims.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py index f693522ae..33ed2ed0f 100644 --- a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py +++ b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py @@ -17,7 +17,9 @@ # vars = ['ts', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'pr', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs'] # vars = ['ta', 'ua', 'va', 'zg', 'hur', 'hus'] # vars = ['ts', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'pr', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs', 'ta', 'ua', 'va', 'zg', 'hur', 'hus'] -vars = ['ts', 'pr'] +# vars = ['ts', 'pr'] +# vars = ['tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs'] +vars = ['ta', 'ua', 'va', 'zg', 'hur'] lst1 = [] listlog = [] From 295aca76957eb9e255a9665b4bc98e7d9d837a74 Mon Sep 17 
00:00:00 2001 From: lee1043 Date: Mon, 6 Feb 2023 20:00:46 -0800 Subject: [PATCH 111/130] clean up --- .../mean_climate/lib/compute_metrics.py | 4 +- pcmdi_metrics/mean_climate/scripts/README.md | 7 +- .../scripts/post_process_merge_jsons.py | 114 ++++++++++++++++++ 3 files changed, 122 insertions(+), 3 deletions(-) create mode 100755 pcmdi_metrics/mean_climate/scripts/post_process_merge_jsons.py diff --git a/pcmdi_metrics/mean_climate/lib/compute_metrics.py b/pcmdi_metrics/mean_climate/lib/compute_metrics.py index 347e3ba50..1b85f5723 100644 --- a/pcmdi_metrics/mean_climate/lib/compute_metrics.py +++ b/pcmdi_metrics/mean_climate/lib/compute_metrics.py @@ -36,8 +36,8 @@ def compute_metrics(Var, dm, do): do['time_bnds'] = dm['time_bnds'] print('do.time: ', do['time']) - dm.to_netcdf('dm.nc') - do.to_netcdf('do.nc') + # dm.to_netcdf('dm.nc') + # do.to_netcdf('do.nc') metrics_dictionary = {} diff --git a/pcmdi_metrics/mean_climate/scripts/README.md b/pcmdi_metrics/mean_climate/scripts/README.md index 91b5ea957..dc442e9fd 100644 --- a/pcmdi_metrics/mean_climate/scripts/README.md +++ b/pcmdi_metrics/mean_climate/scripts/README.md @@ -7,4 +7,9 @@ ## Prepare run metrics calculations * `get_all_MIP_mods_from_CLIMS.py`: Generate a json file that includes list of models, e.g., `all_mip_mods-v20230130.json` -mean_climate_driver.py -p ../param/pcmdi_MIP_EXP_pmp_parameterfile.py +## Calculate metrics +* Serial mode + * mean_climate_driver.py -p ../param/pcmdi_MIP_EXP_pmp_parameterfile.py + +## Merge individual JSON files +* post_process_merge_jsons.py diff --git a/pcmdi_metrics/mean_climate/scripts/post_process_merge_jsons.py b/pcmdi_metrics/mean_climate/scripts/post_process_merge_jsons.py new file mode 100755 index 000000000..4ff3286f9 --- /dev/null +++ b/pcmdi_metrics/mean_climate/scripts/post_process_merge_jsons.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python + +import copy +import glob +import json +import os +import sys + +from genutil import StringConstructor + +from pcmdi_metrics.variability_mode.lib import dict_merge + + +def main(): + # mips = ['cmip5', 'cmip6'] + mips = ["cmip6"] + # mips = ['cmip3'] + + # exps = ['historical', 'amip'] + exps = ['historical'] + # exps = ["amip"] + # exps = ['20c3m', 'amip'] + # exps = ['20c3m'] + + case_id = "v20230202" + + syear = 1900 + eyear = 2005 + + obs_selection = "default" + # obs_selection = 'alternative' + + # pmprdir = '/work/lee1043/temporary/result_test' + pmprdir = "/p/user_pub/pmp/pmp_results/pmp_v1.1.2" + + for mip in mips: + for exp in exps: + variables = [s.split('/')[-1] for s in glob.glob(os.path.join(pmprdir, "metrics_results", "mean_climate", mip, exp, case_id, "*")) if os.path.isdir(s)] + print("variables:", variables) + for var in variables: + # json merge + #try: + if 1: + merge_json(mip, exp, case_id, var, obs_selection, syear, eyear, pmprdir) + """ + except Exception as err: + print("ERROR: ", mip, exp, var, err) + pass + """ + +def merge_json(mip, exp, case_id, var, obs, syear, eyear, pmprdir): + json_file_dir_template = ( + "metrics_results/mean_climate/%(mip)/%(exp)/%(case_id)/%(var)" + ) + json_file_dir_template = StringConstructor(json_file_dir_template) + json_file_dir = os.path.join( + pmprdir, + json_file_dir_template(mip=mip, exp=exp, case_id=case_id, var=var), + ) + + print('json_file_dir:', json_file_dir) + + json_file_template = "%(model)_%(var)_*_%(obs).json" + json_file_template = StringConstructor(json_file_template) + + # Search for individual JSONs + json_files = sorted( + glob.glob( + os.path.join( + json_file_dir, + 
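# The merge step further down relies on dict_merge from
# pcmdi_metrics.variability_mode.lib (imported at the top of this script);
# assuming it performs an in-place recursive merge, per-model JSON payloads
# combine roughly like this hypothetical example:
from pcmdi_metrics.variability_mode.lib import dict_merge

a = {"RESULTS": {"ACCESS-CM2": {"default": {"r1i1p1f1": {"global": {"rms": 1.2}}}}}}
b = {"RESULTS": {"CanESM5": {"default": {"r1i1p1f1": {"global": {"rms": 0.9}}}}}}
dict_merge(a, b)  # assumption: a now holds the results for both models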
json_file_template( + # mip=mip, + # exp=exp, + var=var, + model="*", + # run="*", + obs=obs, + ), + ) + ) + ) + + print('json_files:', json_files) + + # Remove diveDown JSONs and previously generated merged JSONs if included + json_files_revised = copy.copy(json_files) + for j, json_file in enumerate(json_files): + filename_component = json_file.split("/")[-1].split(".")[0].split("_") + if "allModels" in filename_component: + json_files_revised.remove(json_file) + elif "allRuns" in filename_component: + json_files_revised.remove(json_file) + + # Load individual JSON and merge to one big dictionary + for j, json_file in enumerate(json_files_revised): + print(j, json_file) + f = open(json_file) + dict_tmp = json.loads(f.read()) + if j == 0: + dict_final = dict_tmp.copy() + else: + dict_merge(dict_final, dict_tmp) + f.close() + + # Dump final dictionary to JSON + final_json_filename = StringConstructor("%(var)_%(mip)_%(exp)_%(case_id).json")(var=var, mip=mip, exp=exp, case_id=case_id) + final_json_file = os.path.join(json_file_dir, "..", final_json_filename) + + with open(final_json_file, "w") as fp: + json.dump(dict_final, fp, sort_keys=True, indent=4) + + +if __name__ == "__main__": + main() From 650dc267dc0e41edfa3907afa3efb1785f80f8ac Mon Sep 17 00:00:00 2001 From: lee1043 Date: Wed, 8 Feb 2023 13:47:09 -0800 Subject: [PATCH 112/130] bug fix --- .../lib/create_mean_climate_parser.py | 12 ++++--- .../mean_climate/mean_climate_driver.py | 36 +++++++++++++------ .../scripts/allvars_parallel_mod_clims.py | 5 ++- 3 files changed, 34 insertions(+), 19 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py b/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py index bb0c585c8..937ed95b3 100644 --- a/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py +++ b/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py @@ -135,7 +135,10 @@ def create_mean_climate_parser(): ) parser.add_argument( - "--ext", dest="ext", help="Extension for the output files?", required=False + "--ext", + dest="ext", + help="Extension for the output files?", + required=False ) parser.add_argument( @@ -188,6 +191,7 @@ def create_mean_climate_parser(): type=lambda x: x.lower() == "true", dest="save_test_clims", help="True if to save interpolated test climatologies," + " otherwise False", + default=False, required=False, ) @@ -214,8 +218,8 @@ def create_mean_climate_parser(): parser.add_argument( "--debug", dest="debug", + action="store_true", help="Turn on debugging mode by printing more information to track progress", - default=False, required=False, ) @@ -224,7 +228,6 @@ def create_mean_climate_parser(): dest="cmec", action="store_true", help="Save metrics in CMEC format", - default=False, required=False, ) @@ -233,8 +236,7 @@ def create_mean_climate_parser(): dest="cmec", action="store_false", help="Option to not save metrics in CMEC format", - default=False, required=False, ) - return parser + return parser \ No newline at end of file diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py index 1df552c02..dd4627f18 100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver.py @@ -8,7 +8,7 @@ import cdms2 import cdutil import numpy as np -import xcdat +import xcdat as xc from pcmdi_metrics import resources from pcmdi_metrics.io import load_regions_specs, region_subset @@ -22,7 +22,13 @@ parser = create_mean_climate_parser() -parameter = 
parser.get_parameter(cmd_default_vars=False, argparse_vals_only=False) +#parameter = parser.get_parameter(cmd_default_vars=False, argparse_vals_only=False) +#parameter = parser.get_parameter(cmd_default_vars=True, argparse_vals_only=False) +parameter = parser.get_parameter(argparse_vals_only=False) +#parameter = parser.get_parameter() +#print(parameter) +#import sys +#sys.exit('test') # parameters case_id = parameter.case_id @@ -45,10 +51,12 @@ metrics_output_path = parameter.metrics_output_path.replace('%(case_id)', case_id) debug = parameter.debug -cmec = False # temporary +cmec = parameter.cmec +find_all_realizations = False if realization is None: - realizations = [""] + realization = "" + realizations = [realization] elif isinstance(realization, str): if realization.lower() in ["all", "*"]: find_all_realizations = True @@ -85,7 +93,7 @@ # generate target grid if target_grid == "2.5x2.5": # target grid for regridding - t_grid = xcdat.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) + t_grid = xc.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) if debug: print('type(t_grid):', type(t_grid)) # Expected type is 'xarray.core.dataset.Dataset' print('t_grid:', t_grid) @@ -137,6 +145,10 @@ # ---------------- # observation loop # ---------------- + if "all" in reference_data_set: + reference_data_set = [x for x in list(obs_dict[varname].keys()) if (x == "default" or "alternate" in x)] + print("reference_data_set (all): ", reference_data_set) + for ref in reference_data_set: print('ref:', ref) # identify data to load (annual cycle (AC) data is loading in) @@ -157,7 +169,7 @@ if find_all_realizations: test_data_full_path = os.path.join( test_data_path, - filename_template).replace('%(variable)', varname).replace('%(model)', model).replace('%(realization)', '*') + filename_template).replace('%(variable)', varname).replace('%(model)', model).replace('%(model_version)', model).replace('%(realization)', '*') ncfiles = glob.glob(test_data_full_path) realizations = [] for ncfile in ncfiles: @@ -169,15 +181,17 @@ # identify data to load (annual cycle (AC) data is loading in) test_data_full_path = os.path.join( test_data_path, - filename_template).replace('%(variable)', varname).replace('%(model)', model).replace('%(realization)', run) + filename_template).replace('%(variable)', varname).replace('%(model)', model).replace('%(model_version)', model).replace('%(realization)', run) if os.path.exists(test_data_full_path): print('-----------------------') print('model, run:', model, run) - try: + print('test_data (model in this case) full_path:', test_data_full_path) + #try: + if 1: ds_test_dict = dict() # load data and regrid - ds_test = load_and_regrid(test_data_full_path, varname, level, t_grid, regrid_tool=regrid_tool, debug=debug) + ds_test = load_and_regrid(test_data_full_path, varname, level, t_grid, decode_times=True, regrid_tool=regrid_tool, debug=debug) print('load and regrid done') # ----------- @@ -235,11 +249,11 @@ cmec_flag=cmec, debug=debug ) - + """ except Exception as e: print('error occured for ', model, run) print(e) - + """ """ # write collective JSON --- all models / all obs / single variable json_filename = "_".join([var, target_grid, regrid_tool, "metrics"]) diff --git a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py index 33ed2ed0f..4d4ecdab8 100644 --- a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py +++ 
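# What the "all" expansion of reference_data_set above yields, with a
# hypothetical obs_dict entry (dataset-name keys are skipped; only the
# "default"/"alternate*" aliases are kept):
obs_dict = {"pr": {"default": "GPCP-2-3", "alternate1": "TRMM-3B43v-7", "GPCP-2-3": {}, "TRMM-3B43v-7": {}}}
varname = "pr"
reference_data_set = [x for x in list(obs_dict[varname].keys()) if (x == "default" or "alternate" in x)]
# reference_data_set == ["default", "alternate1"]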
b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py @@ -4,9 +4,9 @@ from pcmdi_metrics.misc.scripts import parallel_submitter +mip = 'cmip5' exp = 'historical' # exp = 'amip' -mip = 'cmip6' verin = 'v20230201' start = '1981-01' end = '2005-12' @@ -18,8 +18,7 @@ # vars = ['ta', 'ua', 'va', 'zg', 'hur', 'hus'] # vars = ['ts', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'pr', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs', 'ta', 'ua', 'va', 'zg', 'hur', 'hus'] # vars = ['ts', 'pr'] -# vars = ['tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs'] -vars = ['ta', 'ua', 'va', 'zg', 'hur'] +vars = ['tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs', 'ta', 'ua', 'va', 'zg', 'hur'] lst1 = [] listlog = [] From c6aae7215542c18f7a42293d72f6f338ae1da19c Mon Sep 17 00:00:00 2001 From: lee1043 Date: Mon, 13 Feb 2023 09:48:52 -0800 Subject: [PATCH 113/130] clean up --- .../mean_climate/scripts/allvars_parallel_mod_clims.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py index 4d4ecdab8..4b052e3d4 100644 --- a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py +++ b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py @@ -17,8 +17,8 @@ # vars = ['ts', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'pr', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs'] # vars = ['ta', 'ua', 'va', 'zg', 'hur', 'hus'] # vars = ['ts', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'pr', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs', 'ta', 'ua', 'va', 'zg', 'hur', 'hus'] -# vars = ['ts', 'pr'] -vars = ['tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs', 'ta', 'ua', 'va', 'zg', 'hur'] +vars = ['ts', 'pr'] +#vars = ['tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs', 'ta', 'ua', 'va', 'zg', 'hur'] lst1 = [] listlog = [] @@ -38,16 +38,15 @@ print(li.split('.')) mod = li.split('.')[4] # model rn = li.split('.')[5] # realization - vv = li.split('.')[7] # variable - outfilename = mip + '.' + exp + '.' + mod + '.r1i1p1f1.mon.' + var + '.nc' + outfilename = mip + '.' + exp + '.' + mod + '.' + rn + '.mon.' + var + '.nc' cmd0 = "pcmdi_compute_climatologies.py --start " + start + " --end " + end + " --infile " pathout = pathoutdir + '/' + outfilename cmd = cmd0 + li + ' --outfile ' + pathout + ' --var ' + var lst1.append(cmd) - logf = mod + '.' + rn + '.' + vv + logf = mod + '.' + rn + '.' 
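# Each submitted job is a plain CLI call assembled from the pieces above; a
# sketch with hypothetical paths (start/end/var come from the settings at the
# top of the script):
start, end, var = "1981-01", "2005-12", "pr"
li = "/path/to/xmls/cmip6.historical.ACCESS-CM2.r1i1p1f1.mon.pr.xml"
pathout = "/path/to/CMIP_CLIMS/cmip6/historical/v20230217/pr/cmip6.historical.ACCESS-CM2.r1i1p1f1.mon.pr.nc"
cmd = ("pcmdi_compute_climatologies.py --start " + start + " --end " + end
       + " --infile " + li + " --outfile " + pathout + " --var " + var)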
+ var listlog.append(logf) print(logf) From ed68a264fe128a39d6c514a77032d3c1f7b3d6c9 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Mon, 13 Feb 2023 09:50:38 -0800 Subject: [PATCH 114/130] testing --- .../scripts/get_all_MIP_mods_from_CLIMS.py | 4 ++-- pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py b/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py index 160b057f4..5f95c08c4 100755 --- a/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py +++ b/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py @@ -1,14 +1,14 @@ import glob import json -ver = 'v20230202' +ver = 'v20230208' pin = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/%(MIP)/%(EXP)/' + ver + '/ts/' # MIPS = ['cmip6', 'cmip5'] # exps = ['historical', 'amip'] -MIPS = ['cmip6'] +MIPS = ['cmip5'] exps = ['historical'] mod_dic = {} diff --git a/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py b/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py index 078b60aa0..924daa541 100755 --- a/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py +++ b/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py @@ -15,17 +15,17 @@ exp = 'historical' # exp = 'amip' -MIP = 'cmip6' # 'CMIP6' -# MIP = 'cmip5' # 'CMIP5' +# MIP = 'cmip6' # 'CMIP6' +MIP = 'cmip5' # 'CMIP5' if MIP == 'cmip6': ver = 'v20230202' if MIP == 'cmip5': - ver = 'v20200426' + ver = 'v20230208' # NEED TO RUN SEPERATELY FOR LW AND SW (i.e., rsut and rlut) -# radvar = 'rsut' -radvar = 'rlut' +radvar = 'rsut' +# radvar = 'rlut' pit = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/' + MIP + '/' + exp + '/' + ver + '/' pi = pit + radvar + 'cs/' From ac390df92d9ae565ead20042c36422f3ac0f1e6c Mon Sep 17 00:00:00 2001 From: lee1043 Date: Mon, 13 Feb 2023 11:29:39 -0800 Subject: [PATCH 115/130] testing --- .../mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py index 6943c3065..30829369e 100755 --- a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py +++ b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py @@ -10,8 +10,8 @@ # ############################################################################### case_id = ver -MIP = 'cmip6' # 'CMIP6' -# MIP = 'cmip5' # 'CMIP6' +# MIP = 'cmip6' # 'CMIP6' +MIP = 'cmip5' # 'CMIP6' exp = 'historical' # exp = 'amip' # exp = 'picontrol' @@ -26,9 +26,7 @@ if MIP == 'cmip6': modver = 'v20230202' if MIP == 'cmip5': - modver = 'v20220928' - if exp == 'historical': - modver = 'v20220928' + modver = 'v20230208' # LIST OF MODEL VERSIONS TO BE TESTED - WHICH ARE EXPECTED TO BE PART OF CLIMATOLOGY FILENAME From 9aa6e584ab0ff89d39ead96d54ba0f3e216503e1 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Fri, 17 Feb 2023 09:33:36 -0800 Subject: [PATCH 116/130] clean up --- .../scripts/allvars_parallel_mod_clims.py | 21 +++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py index 4b052e3d4..c9e94ac3c 100644 --- a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py +++ b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py @@ 
-4,10 +4,18 @@ from pcmdi_metrics.misc.scripts import parallel_submitter -mip = 'cmip5' + +def find_latest(path): + dir_list = [p for p in glob.glob(path + "/v????????")] + return sorted(dir_list)[-1] + + +# mip = 'cmip5' +mip = 'cmip6' exp = 'historical' # exp = 'amip' -verin = 'v20230201' +# verin = 'v20230201' +data_path = find_latest("/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest") start = '1981-01' end = '2005-12' numw = 35 # number of workers in parallel processing @@ -17,16 +25,17 @@ # vars = ['ts', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'pr', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs'] # vars = ['ta', 'ua', 'va', 'zg', 'hur', 'hus'] # vars = ['ts', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'pr', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs', 'ta', 'ua', 'va', 'zg', 'hur', 'hus'] -vars = ['ts', 'pr'] -#vars = ['tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs', 'ta', 'ua', 'va', 'zg', 'hur'] +# vars = ['ts', 'pr', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs', 'ta', 'ua', 'va', 'zg', 'hur'] +vars = ['hur', 'hurs', 'huss', 'pr', 'prw', 'psl', 'rlds', 'rldscs', 'rlus', 'rlut', 'rlutcs', 'rsds', 'rsdscs', 'rsdt', 'rsus', 'rsut', 'rsutcs', 'sfcWind', 'ta', 'tas', 'tauu', 'tauv', 'ts', 'ua', 'uas', 'va', 'vas', 'zg'] lst1 = [] listlog = [] for var in vars: - pin = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/' + verin + '/' + mip + '/' + exp + '/atmos/mon/' + var + '/' + # pin = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/' + verin + '/' + mip + '/' + exp + '/atmos/mon/' + var + '/' + pin = os.path.join(data_path, mip, exp,'atmos', 'mon', var) - lst = sorted(glob.glob(pin + '*r1i1p1*.xml')) + lst = sorted(glob.glob(os.path.join(pin, '*r1i1p1*.xml'))) pathout_base = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/' + mip + '/' + exp + '/' pathoutdir = os.path.join(pathout_base, verout, var) From e6e47b74e24fe67f70f850b1ce97d82612687404 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Sat, 18 Feb 2023 21:55:43 -0800 Subject: [PATCH 117/130] add proper time bound if cdms-generated annual cycle data was loaded --- pcmdi_metrics/mean_climate/lib/load_and_regrid.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pcmdi_metrics/mean_climate/lib/load_and_regrid.py b/pcmdi_metrics/mean_climate/lib/load_and_regrid.py index 4aa97b894..78648338d 100644 --- a/pcmdi_metrics/mean_climate/lib/load_and_regrid.py +++ b/pcmdi_metrics/mean_climate/lib/load_and_regrid.py @@ -1,4 +1,5 @@ from pcmdi_metrics.io import xcdat_open +import cftime def load_and_regrid(data_path, varname, level=None, t_grid=None, decode_times=True, regrid_tool='regrid2', debug=False): @@ -15,6 +16,11 @@ def load_and_regrid(data_path, varname, level=None, t_grid=None, decode_times=Tr """ # load data ds = xcdat_open(data_path, data_var=varname, decode_times=decode_times) # NOTE: decode_times=False will be removed once obs4MIP written using xcdat + # time bound check -- add proper time bound info if cdms-generated annual cycle is loaded + if not isinstance(ds.time.values[0], cftime._cftime.DatetimeProlepticGregorian) and "units" not in 
list(ds.time.attrs.keys()): + ds.time.attrs['units'] = "days since 0001-01-01" + ds = xc.decode_time(ds) + # level if level is not None: level = level * 100 # hPa to Pa ds = ds.sel(plev=level) From aed838f206da89029bada51683fbced6b83a203d Mon Sep 17 00:00:00 2001 From: lee1043 Date: Sat, 18 Feb 2023 21:56:25 -0800 Subject: [PATCH 118/130] clean up --- .../mean_climate/scripts/allvars_parallel_mod_clims.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py index c9e94ac3c..a02df4d47 100644 --- a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py +++ b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py @@ -10,15 +10,15 @@ def find_latest(path): return sorted(dir_list)[-1] -# mip = 'cmip5' -mip = 'cmip6' +mip = 'cmip5' +# mip = 'cmip6' exp = 'historical' # exp = 'amip' # verin = 'v20230201' data_path = find_latest("/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest") start = '1981-01' end = '2005-12' -numw = 35 # number of workers in parallel processing +numw = 20 # number of workers in parallel processing verout = datetime.datetime.now().strftime('v%Y%m%d') # vars = ['rlut', 'tas', 'pr'] @@ -27,6 +27,7 @@ def find_latest(path): # vars = ['ts', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'pr', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs', 'ta', 'ua', 'va', 'zg', 'hur', 'hus'] # vars = ['ts', 'pr', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs', 'ta', 'ua', 'va', 'zg', 'hur'] vars = ['hur', 'hurs', 'huss', 'pr', 'prw', 'psl', 'rlds', 'rldscs', 'rlus', 'rlut', 'rlutcs', 'rsds', 'rsdscs', 'rsdt', 'rsus', 'rsut', 'rsutcs', 'sfcWind', 'ta', 'tas', 'tauu', 'tauv', 'ts', 'ua', 'uas', 'va', 'vas', 'zg'] +# vars = ['ts', 'pr'] lst1 = [] listlog = [] From e4914d8e4c89d55527c32fff560e143bdb3feb3d Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Sun, 19 Feb 2023 20:41:08 -0800 Subject: [PATCH 119/130] bug fix --- pcmdi_metrics/mean_climate/lib/load_and_regrid.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pcmdi_metrics/mean_climate/lib/load_and_regrid.py b/pcmdi_metrics/mean_climate/lib/load_and_regrid.py index 78648338d..356d7301e 100644 --- a/pcmdi_metrics/mean_climate/lib/load_and_regrid.py +++ b/pcmdi_metrics/mean_climate/lib/load_and_regrid.py @@ -1,6 +1,6 @@ from pcmdi_metrics.io import xcdat_open import cftime - +import xcdat as xc def load_and_regrid(data_path, varname, level=None, t_grid=None, decode_times=True, regrid_tool='regrid2', debug=False): """Load data and regrid to target grid From 3236a5af5d834765e205dab4daff3f173adc392a Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Tue, 21 Feb 2023 13:29:08 -0800 Subject: [PATCH 120/130] add xesmf as one of dependency --- conda-env/dev.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/conda-env/dev.yml b/conda-env/dev.yml index c8090fc75..b32d178e6 100644 --- a/conda-env/dev.yml +++ b/conda-env/dev.yml @@ -26,6 +26,7 @@ dependencies: - regionmask=0.9.0 - rasterio=1.2.10 - shapely=1.8.0 + - xesmf=0.7.0 # Testing # ================== - pre_commit=2.20.0 From 33f6367655b19e73517155190d92385d92bd8417 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Tue, 21 Feb 2023 13:29:52 -0800 Subject: [PATCH 121/130] update demo -- in particular for 
custom domain setup --- doc/jupyter/Demo/Demo_1b_mean_climate.ipynb | 1085 +++++++++++++++---- 1 file changed, 852 insertions(+), 233 deletions(-) diff --git a/doc/jupyter/Demo/Demo_1b_mean_climate.ipynb b/doc/jupyter/Demo/Demo_1b_mean_climate.ipynb index c82af02ac..4627d175f 100644 --- a/doc/jupyter/Demo/Demo_1b_mean_climate.ipynb +++ b/doc/jupyter/Demo/Demo_1b_mean_climate.ipynb @@ -40,7 +40,8 @@ "cell_type": "code", "execution_count": 2, "metadata": { - "scrolled": true + "scrolled": true, + "tags": [] }, "outputs": [ { @@ -126,34 +127,156 @@ "cell_type": "code", "execution_count": 3, "metadata": { - "scrolled": true + "scrolled": true, + "tags": [] }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: REGION: Global\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: alternate1 is an obs\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: CanCM4 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: default is an obs\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: CanCM4 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Results saved to a json file: 
/Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_linear_metrics.json\n" + "2023-02-20 08:56:23,632 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-20 08:56::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-20 08:56:48,696 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-20 08:56:48,718 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-20 08:57::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-20 08:57:07,713 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-20 08:57:09,161 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-20 08:57::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-20 08:57:27,364 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-20 08:57:27,382 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-20 08:57::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-20 08:57:46,468 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "INFO::2023-02-20 08:57::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_metrics.json\n", + "2023-02-20 08:57:46,471 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_metrics.json\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "case_id: basicTest \n", + " test_data_set: ['ACCESS1-0', 'CanCM4'] \n", + " realization: \n", + " vars: ['rlut'] \n", + " reference_data_set: ['all'] \n", + " target_grid: 2.5x2.5 \n", + " regrid_tool: regrid2 \n", + " regrid_tool_ocn: esmf \n", + " save_test_clims: False \n", + " test_clims_interpolated_output: None \n", + " filename_template: cmip5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20200426.nc \n", + " sftlf_filename_template: sftlf_%(model_version).nc \n", + " generate_sftlf: True \n", + " regions_specs: {'NHEX': {'domain': {'latitude': (30.0, 90)}}, 'SHEX': {'domain': {'latitude': (-90.0, -30)}}, 'TROPICS': {'domain': {'latitude': (-30.0, 30)}}, 'global': {}, '90S50S': {'domain': {'latitude': (-90.0, -50)}}, '50S20S': {'domain': {'latitude': (-50.0, -20)}}, '20S20N': {'domain': {'latitude': (-20.0, 20)}}, '20N50N': {'domain': {'latitude': (20.0, 50)}}, '50N90N': {'domain': {'latitude': (50.0, 90)}}, 'land_NHEX': {'value': 100, 'domain': {'latitude': (30.0, 90)}}, 'land_SHEX': {'value': 100, 'domain': {'latitude': (-90.0, -30)}}, 'land_TROPICS': {'value': 100, 'domain': {'latitude': (-30.0, 30)}}, 'land': {'value': 100}, 'ocean_NHEX': {'value': 0, 'domain': {'latitude': (30.0, 90)}}, 'ocean_SHEX': {'value': 0, 'domain': {'latitude': (-90.0, -30)}}, 'ocean_TROPICS': {'value': 0, 'domain': {'latitude': (30.0, 30)}}, 'ocean': {'value': 0}, 'CONUS': {'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'land_CONUS': {'value': 100, 'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'NAM': {'domain': {'latitude': (20.0, 90), 'longitude': (-180, 180)}}, 'NAO': {'domain': {'latitude': (20.0, 80), 'longitude': (-90, 40)}}, 'SAM': {'domain': {'latitude': (-20.0, -90), 'longitude': (0, 360)}}, 'PNA': {'domain': {'latitude': (20.0, 85), 'longitude': (120, 240)}}, 'PDO': {'domain': {'latitude': (20.0, 70), 'longitude': (110, 260)}}, 'AllMW': {'domain': {'latitude': (-40.0, 45.0), 'longitude': (0.0, 360.0)}}, 'AllM': {'domain': {'latitude': (-45.0, 45.0), 'longitude': (0.0, 360.0)}}, 'NAMM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (210.0, 310.0)}}, 'SAMM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (240.0, 330.0)}}, 'NAFM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (310.0, 60.0)}}, 'SAFM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (0.0, 90.0)}}, 'ASM': 
{'domain': {'latitude': (0.0, 45.0), 'longitude': (60.0, 180.0)}}, 'AUSM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (90.0, 160.0)}}, 'AIR': {'domain': {'latitude': (7.0, 25.0), 'longitude': (65.0, 85.0)}}, 'AUS': {'domain': {'latitude': (-20.0, -10.0), 'longitude': (120.0, 150.0)}}, 'Sahel': {'domain': {'latitude': (13.0, 18.0), 'longitude': (-10.0, 10.0)}}, 'GoG': {'domain': {'latitude': (0.0, 5.0), 'longitude': (-10.0, 10.0)}}, 'NAmo': {'domain': {'latitude': (20.0, 37.0), 'longitude': (-112.0, -103.0)}}, 'SAmo': {'domain': {'latitude': (-20.0, 2.5), 'longitude': (-65.0, -40.0)}}} \n", + " regions: {'rlut': ['Global']} \n", + " test_data_path: demo_data/CMIP5_demo_clims/ \n", + " reference_data_path: demo_data/obs4MIPs_PCMDI_clims \n", + " metrics_output_path: demo_output/basicTest \n", + " debug: False \n", + "\n", + "--- prepare mean climate metrics calculation ---\n", + "--- start mean climate metrics calculation ---\n", + "varname: rlut\n", + "level: None\n", + "reference_data_set (all): ['alternate1', 'default']\n", + "ref: alternate1\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-0/v20210804/rlut_mon_CERES-EBAF-4-0_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "ref: default\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-1/v20210804/rlut_mon_CERES-EBAF-4-1_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "pmp mean clim driver completed\n" ] } ], @@ -172,7 +295,10 @@ { "cell_type": "code", "execution_count": 4, - "metadata": {}, + "metadata": { + "scrolled": true, + "tags": [] + }, "outputs": [ { "name": "stdout", @@ -180,27 +306,11 @@ "text": [ "{\n", " \"ACCESS1-0\": {\n", - " \"units\": \"W m-2\",\n", - " \"SimulationDescription\": {\n", - " \"MIPTable\": \"Amon\",\n", - " \"Model\": \"ACCESS1-0\",\n", - " \"ModelActivity\": \"CMIP5\",\n", - " \"ModellingGroup\": \"CSIRO-BOM\",\n", - " \"Experiment\": \"historical\",\n", - " \"ModelFreeSpace\": \"N/A\",\n", - " \"Realization\": \"\",\n", - " \"creation_date\": \"2012-01-15T12:34:39Z\"\n", - " },\n", - " \"InputClimatologyFileName\": \"cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\",\n", - " \"InputClimatologyMD5\": \"16fb29fa02cc8c68e170502bca145640\",\n", - " \"InputRegionFileName\": null,\n", - " \"InputRegionMD5\": null,\n", " \"alternate1\": {\n", - " \"source\": \"CERES-EBAF-4-0\",\n", " \"\": {\n", " \"Global\": {\n", " \"bias_xy\": {\n", - " \"ann\": \"1.124\",\n", + " \"ann\": \"1.138\",\n", " \"djf\": \"1.675\",\n", " \"mam\": \"1.392\",\n", " \"jja\": \"0.859\",\n", @@ -242,7 +352,7 @@ " ]\n", " },\n", " \"mae_xy\": {\n", - " \"ann\": \"5.784\",\n", + " \"ann\": \"5.770\",\n", " \"djf\": \"7.158\",\n", " \"mam\": \"7.246\",\n", " \"jja\": \"7.512\",\n", @@ -263,7 +373,7 @@ " ]\n", " },\n", " \"mean-obs_xy\": {\n", - " \"ann\": \"240.331\",\n", + " \"ann\": \"240.317\",\n", " \"djf\": \"237.540\",\n", " \"mam\": \"239.327\",\n", " \"jja\": \"243.879\",\n", @@ -305,10 +415,10 @@ " ]\n", " },\n", " \"rms_devzm\": {\n", - " \"ann\": \"5.805\"\n", + " \"ann\": \"5.808\"\n", " },\n", " \"rms_xy\": {\n", - " \"ann\": \"8.062\",\n", + " \"ann\": \"8.043\",\n", " \"djf\": \"10.231\",\n", " \"mam\": \"10.774\",\n", " \"jja\": \"10.439\",\n", @@ -329,13 +439,13 @@ " ]\n", " },\n", " \"rms_xyt\": {\n", - " \"ann\": \"11.457\"\n", + " \"ann\": \"11.443\"\n", " },\n", " \"rms_y\": {\n", - " \"ann\": \"4.833\"\n", + " \"ann\": \"5.565\"\n", " },\n", " \"rmsc_xy\": {\n", - " \"ann\": \"7.983\",\n", + " \"ann\": \"7.962\",\n", " \"djf\": \"10.093\",\n", " \"mam\": \"10.684\",\n", " \"jja\": \"10.404\",\n", @@ -356,7 +466,7 @@ " ]\n", " },\n", " \"std-obs_xy\": {\n", - " \"ann\": \"29.645\",\n", + " \"ann\": \"29.642\",\n", " \"djf\": \"32.679\",\n", " \"mam\": \"30.811\",\n", " \"jja\": \"35.368\",\n", @@ -377,10 +487,10 @@ " ]\n", " },\n", " \"std-obs_xy_devzm\": {\n", - " \"ann\": \"12.980\"\n", + " \"ann\": \"12.977\"\n", " },\n", " \"std-obs_xyt\": {\n", - " \"ann\": \"33.463\"\n", + " \"ann\": 
\"33.461\"\n", " },\n", " \"std_xy\": {\n", " \"ann\": \"31.968\",\n", @@ -413,11 +523,10 @@ " }\n", " },\n", " \"default\": {\n", - " \"source\": \"CERES-EBAF-4-1\",\n", " \"\": {\n", " \"Global\": {\n", " \"bias_xy\": {\n", - " \"ann\": \"1.122\",\n", + " \"ann\": \"1.137\",\n", " \"djf\": \"1.644\",\n", " \"mam\": \"1.325\",\n", " \"jja\": \"0.866\",\n", @@ -459,7 +568,7 @@ " ]\n", " },\n", " \"mae_xy\": {\n", - " \"ann\": \"5.777\",\n", + " \"ann\": \"5.763\",\n", " \"djf\": \"7.165\",\n", " \"mam\": \"7.307\",\n", " \"jja\": \"7.555\",\n", @@ -480,7 +589,7 @@ " ]\n", " },\n", " \"mean-obs_xy\": {\n", - " \"ann\": \"240.333\",\n", + " \"ann\": \"240.318\",\n", " \"djf\": \"237.570\",\n", " \"mam\": \"239.394\",\n", " \"jja\": \"243.872\",\n", @@ -522,10 +631,10 @@ " ]\n", " },\n", " \"rms_devzm\": {\n", - " \"ann\": \"5.804\"\n", + " \"ann\": \"5.807\"\n", " },\n", " \"rms_xy\": {\n", - " \"ann\": \"8.050\",\n", + " \"ann\": \"8.033\",\n", " \"djf\": \"10.240\",\n", " \"mam\": \"10.871\",\n", " \"jja\": \"10.484\",\n", @@ -546,13 +655,13 @@ " ]\n", " },\n", " \"rms_xyt\": {\n", - " \"ann\": \"11.457\"\n", + " \"ann\": \"11.441\"\n", " },\n", " \"rms_y\": {\n", - " \"ann\": \"4.819\"\n", + " \"ann\": \"5.549\"\n", " },\n", " \"rmsc_xy\": {\n", - " \"ann\": \"7.972\",\n", + " \"ann\": \"7.952\",\n", " \"djf\": \"10.107\",\n", " \"mam\": \"10.790\",\n", " \"jja\": \"10.449\",\n", @@ -573,7 +682,7 @@ " ]\n", " },\n", " \"std-obs_xy\": {\n", - " \"ann\": \"29.642\",\n", + " \"ann\": \"29.638\",\n", " \"djf\": \"32.730\",\n", " \"mam\": \"30.769\",\n", " \"jja\": \"35.354\",\n", @@ -594,10 +703,10 @@ " ]\n", " },\n", " \"std-obs_xy_devzm\": {\n", - " \"ann\": \"12.973\"\n", + " \"ann\": \"12.970\"\n", " },\n", " \"std-obs_xyt\": {\n", - " \"ann\": \"33.413\"\n", + " \"ann\": \"33.411\"\n", " },\n", " \"std_xy\": {\n", " \"ann\": \"31.968\",\n", @@ -631,27 +740,11 @@ " }\n", " },\n", " \"CanCM4\": {\n", - " \"units\": \"W m-2\",\n", - " \"SimulationDescription\": {\n", - " \"MIPTable\": \"Amon\",\n", - " \"Model\": \"CanCM4\",\n", - " \"ModelActivity\": \"CMIP5\",\n", - " \"ModellingGroup\": \"CCCma\",\n", - " \"Experiment\": \"historical\",\n", - " \"ModelFreeSpace\": \"N/A\",\n", - " \"Realization\": \"\",\n", - " \"creation_date\": \"2012-01-31T22:04:48Z\"\n", - " },\n", - " \"InputClimatologyFileName\": \"cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\",\n", - " \"InputClimatologyMD5\": \"40b2bfa71a3b7d2febb55652ef551001\",\n", - " \"InputRegionFileName\": null,\n", - " \"InputRegionMD5\": null,\n", " \"alternate1\": {\n", - " \"source\": \"CERES-EBAF-4-0\",\n", " \"\": {\n", " \"Global\": {\n", " \"bias_xy\": {\n", - " \"ann\": \"-1.178\",\n", + " \"ann\": \"-1.164\",\n", " \"djf\": \"-0.905\",\n", " \"mam\": \"-0.959\",\n", " \"jja\": \"-1.403\",\n", @@ -693,7 +786,7 @@ " ]\n", " },\n", " \"mae_xy\": {\n", - " \"ann\": \"6.327\",\n", + " \"ann\": \"6.329\",\n", " \"djf\": \"7.489\",\n", " \"mam\": \"8.016\",\n", " \"jja\": \"7.625\",\n", @@ -714,7 +807,7 @@ " ]\n", " },\n", " \"mean-obs_xy\": {\n", - " \"ann\": \"240.331\",\n", + " \"ann\": \"240.317\",\n", " \"djf\": \"237.540\",\n", " \"mam\": \"239.327\",\n", " \"jja\": \"243.879\",\n", @@ -756,10 +849,10 @@ " ]\n", " },\n", " \"rms_devzm\": {\n", - " \"ann\": \"9.351\"\n", + " \"ann\": \"9.364\"\n", " },\n", " \"rms_xy\": {\n", - " \"ann\": \"9.606\",\n", + " \"ann\": \"9.610\",\n", " \"djf\": \"10.947\",\n", " \"mam\": \"11.785\",\n", " \"jja\": \"11.218\",\n", @@ -780,13 +873,13 @@ " ]\n", " },\n", " 
\"rms_xyt\": {\n", - " \"ann\": \"12.595\"\n", + " \"ann\": \"12.576\"\n", " },\n", " \"rms_y\": {\n", - " \"ann\": \"2.472\"\n", + " \"ann\": \"2.161\"\n", " },\n", " \"rmsc_xy\": {\n", - " \"ann\": \"9.533\",\n", + " \"ann\": \"9.539\",\n", " \"djf\": \"10.910\",\n", " \"mam\": \"11.746\",\n", " \"jja\": \"11.130\",\n", @@ -807,7 +900,7 @@ " ]\n", " },\n", " \"std-obs_xy\": {\n", - " \"ann\": \"29.645\",\n", + " \"ann\": \"29.642\",\n", " \"djf\": \"32.679\",\n", " \"mam\": \"30.811\",\n", " \"jja\": \"35.368\",\n", @@ -828,10 +921,10 @@ " ]\n", " },\n", " \"std-obs_xy_devzm\": {\n", - " \"ann\": \"12.980\"\n", + " \"ann\": \"12.977\"\n", " },\n", " \"std-obs_xyt\": {\n", - " \"ann\": \"33.463\"\n", + " \"ann\": \"33.461\"\n", " },\n", " \"std_xy\": {\n", " \"ann\": \"31.121\",\n", @@ -864,11 +957,10 @@ " }\n", " },\n", " \"default\": {\n", - " \"source\": \"CERES-EBAF-4-1\",\n", " \"\": {\n", " \"Global\": {\n", " \"bias_xy\": {\n", - " \"ann\": \"-1.180\",\n", + " \"ann\": \"-1.165\",\n", " \"djf\": \"-0.936\",\n", " \"mam\": \"-1.026\",\n", " \"jja\": \"-1.395\",\n", @@ -910,7 +1002,7 @@ " ]\n", " },\n", " \"mae_xy\": {\n", - " \"ann\": \"6.329\",\n", + " \"ann\": \"6.332\",\n", " \"djf\": \"7.484\",\n", " \"mam\": \"8.026\",\n", " \"jja\": \"7.644\",\n", @@ -931,7 +1023,7 @@ " ]\n", " },\n", " \"mean-obs_xy\": {\n", - " \"ann\": \"240.333\",\n", + " \"ann\": \"240.318\",\n", " \"djf\": \"237.570\",\n", " \"mam\": \"239.394\",\n", " \"jja\": \"243.872\",\n", @@ -973,10 +1065,10 @@ " ]\n", " },\n", " \"rms_devzm\": {\n", - " \"ann\": \"9.351\"\n", + " \"ann\": \"9.364\"\n", " },\n", " \"rms_xy\": {\n", - " \"ann\": \"9.603\",\n", + " \"ann\": \"9.608\",\n", " \"djf\": \"10.915\",\n", " \"mam\": \"11.801\",\n", " \"jja\": \"11.246\",\n", @@ -997,13 +1089,13 @@ " ]\n", " },\n", " \"rms_xyt\": {\n", - " \"ann\": \"12.608\"\n", + " \"ann\": \"12.587\"\n", " },\n", " \"rms_y\": {\n", - " \"ann\": \"2.467\"\n", + " \"ann\": \"2.152\"\n", " },\n", " \"rmsc_xy\": {\n", - " \"ann\": \"9.531\",\n", + " \"ann\": \"9.537\",\n", " \"djf\": \"10.875\",\n", " \"mam\": \"11.756\",\n", " \"jja\": \"11.159\",\n", @@ -1024,7 +1116,7 @@ " ]\n", " },\n", " \"std-obs_xy\": {\n", - " \"ann\": \"29.642\",\n", + " \"ann\": \"29.638\",\n", " \"djf\": \"32.730\",\n", " \"mam\": \"30.769\",\n", " \"jja\": \"35.354\",\n", @@ -1045,10 +1137,10 @@ " ]\n", " },\n", " \"std-obs_xy_devzm\": {\n", - " \"ann\": \"12.973\"\n", + " \"ann\": \"12.970\"\n", " },\n", " \"std-obs_xyt\": {\n", - " \"ann\": \"33.413\"\n", + " \"ann\": \"33.411\"\n", " },\n", " \"std_xy\": {\n", " \"ann\": \"31.121\",\n", @@ -1088,7 +1180,7 @@ "source": [ "import json\n", "import os\n", - "output_path = os.path.join(demo_output_directory,\"basicTest/rlut_2.5x2.5_regrid2_linear_metrics.json\")\n", + "output_path = os.path.join(demo_output_directory,\"basicTest/rlut_2.5x2.5_regrid2_metrics.json\")\n", "with open(output_path) as f:\n", " metric = json.load(f)[\"RESULTS\"]\n", "print(json.dumps(metric, indent=2))" @@ -1125,27 +1217,148 @@ "name": "stderr", "output_type": "stream", "text": [ - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: REGION: Global\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: alternate1 is an obs\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Saving results to: 
/Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_linear_metrics.json\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: CanCM4 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_linear_metrics.json\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: default is an obs\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_linear_metrics.json\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: CanCM4 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_linear_metrics.json\n" + "2023-02-20 08:57:54,165 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-20 08:58::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/ACCESS1-0_rlut_2.5x2.5_esmf_metrics_alternate1.json\n", + "2023-02-20 08:58:23,679 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/ACCESS1-0_rlut_2.5x2.5_esmf_metrics_alternate1.json\n", + "2023-02-20 08:58:23,703 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-20 08:58::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/CanCM4_rlut_2.5x2.5_esmf_metrics_alternate1.json\n", + "2023-02-20 08:58:56,596 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/CanCM4_rlut_2.5x2.5_esmf_metrics_alternate1.json\n", + "2023-02-20 08:58:59,145 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-20 08:59::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/ACCESS1-0_rlut_2.5x2.5_esmf_metrics_default.json\n", + "2023-02-20 08:59:36,824 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/ACCESS1-0_rlut_2.5x2.5_esmf_metrics_default.json\n", + "2023-02-20 08:59:36,857 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-20 09:00::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/CanCM4_rlut_2.5x2.5_esmf_metrics_default.json\n", + "2023-02-20 09:00:05,953 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/CanCM4_rlut_2.5x2.5_esmf_metrics_default.json\n", + "INFO::2023-02-20 09:00::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_metrics.json\n", + "2023-02-20 09:00:17,404 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_metrics.json\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "case_id: Ex2 \n", + " test_data_set: ['ACCESS1-0', 'CanCM4'] \n", + " realization: \n", + " vars: ['rlut'] \n", + " reference_data_set: ['all'] \n", + " target_grid: 2.5x2.5 \n", + " regrid_tool: esmf \n", + " regrid_tool_ocn: esmf \n", + " save_test_clims: False \n", + " test_clims_interpolated_output: None \n", + " filename_template: cmip5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20200426.nc \n", + " sftlf_filename_template: sftlf_%(model_version).nc \n", + " generate_sftlf: True \n", + " regions_specs: {'NHEX': {'domain': {'latitude': (30.0, 90)}}, 'SHEX': {'domain': {'latitude': (-90.0, -30)}}, 'TROPICS': {'domain': {'latitude': (-30.0, 30)}}, 'global': {}, '90S50S': {'domain': {'latitude': (-90.0, -50)}}, '50S20S': {'domain': {'latitude': (-50.0, -20)}}, '20S20N': {'domain': {'latitude': (-20.0, 20)}}, '20N50N': {'domain': {'latitude': (20.0, 50)}}, '50N90N': {'domain': {'latitude': 
(50.0, 90)}}, 'land_NHEX': {'value': 100, 'domain': {'latitude': (30.0, 90)}}, 'land_SHEX': {'value': 100, 'domain': {'latitude': (-90.0, -30)}}, 'land_TROPICS': {'value': 100, 'domain': {'latitude': (-30.0, 30)}}, 'land': {'value': 100}, 'ocean_NHEX': {'value': 0, 'domain': {'latitude': (30.0, 90)}}, 'ocean_SHEX': {'value': 0, 'domain': {'latitude': (-90.0, -30)}}, 'ocean_TROPICS': {'value': 0, 'domain': {'latitude': (30.0, 30)}}, 'ocean': {'value': 0}, 'CONUS': {'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'land_CONUS': {'value': 100, 'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'NAM': {'domain': {'latitude': (20.0, 90), 'longitude': (-180, 180)}}, 'NAO': {'domain': {'latitude': (20.0, 80), 'longitude': (-90, 40)}}, 'SAM': {'domain': {'latitude': (-20.0, -90), 'longitude': (0, 360)}}, 'PNA': {'domain': {'latitude': (20.0, 85), 'longitude': (120, 240)}}, 'PDO': {'domain': {'latitude': (20.0, 70), 'longitude': (110, 260)}}, 'AllMW': {'domain': {'latitude': (-40.0, 45.0), 'longitude': (0.0, 360.0)}}, 'AllM': {'domain': {'latitude': (-45.0, 45.0), 'longitude': (0.0, 360.0)}}, 'NAMM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (210.0, 310.0)}}, 'SAMM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (240.0, 330.0)}}, 'NAFM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (310.0, 60.0)}}, 'SAFM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (0.0, 90.0)}}, 'ASM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (60.0, 180.0)}}, 'AUSM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (90.0, 160.0)}}, 'AIR': {'domain': {'latitude': (7.0, 25.0), 'longitude': (65.0, 85.0)}}, 'AUS': {'domain': {'latitude': (-20.0, -10.0), 'longitude': (120.0, 150.0)}}, 'Sahel': {'domain': {'latitude': (13.0, 18.0), 'longitude': (-10.0, 10.0)}}, 'GoG': {'domain': {'latitude': (0.0, 5.0), 'longitude': (-10.0, 10.0)}}, 'NAmo': {'domain': {'latitude': (20.0, 37.0), 'longitude': (-112.0, -103.0)}}, 'SAmo': {'domain': {'latitude': (-20.0, 2.5), 'longitude': (-65.0, -40.0)}}} \n", + " regions: {'rlut': ['Global']} \n", + " test_data_path: demo_data/CMIP5_demo_clims/ \n", + " reference_data_path: demo_data/obs4MIPs_PCMDI_clims \n", + " metrics_output_path: demo_output/Ex2 \n", + " debug: False \n", + "\n", + "--- prepare mean climate metrics calculation ---\n", + "--- start mean climate metrics calculation ---\n", + "varname: rlut\n", + "level: None\n", + "reference_data_set (all): ['alternate1', 'default']\n", + "ref: alternate1\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-0/v20210804/rlut_mon_CERES-EBAF-4-0_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "ref: default\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-1/v20210804/rlut_mon_CERES-EBAF-4-1_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "pmp mean clim driver completed\n" ] } ], @@ -1173,19 +1386,96 @@ "name": "stderr", "output_type": "stream", "text": [ - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: REGION: Global\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: alternate1 is an obs\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: default is an obs\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: Saving results to: 
/Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut_2.5x2.5_regrid2_linear_metrics.json\n" + "2023-02-20 09:00:23,738 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-20 09:00::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-20 09:00:52,873 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-20 09:00:54,331 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-20 09:01::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-20 09:01:28,191 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "INFO::2023-02-20 09:01::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut_2.5x2.5_regrid2_metrics.json\n", + "2023-02-20 09:01:38,324 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut_2.5x2.5_regrid2_metrics.json\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "case_id: Ex3 \n", + " test_data_set: ['ACCESS1-0'] \n", + " realization: \n", + " vars: ['rlut'] \n", + " reference_data_set: ['all'] \n", + " target_grid: 2.5x2.5 \n", + " regrid_tool: regrid2 \n", + " regrid_tool_ocn: esmf \n", + " save_test_clims: False \n", + " test_clims_interpolated_output: None \n", + " filename_template: cmip5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20200426.nc \n", + " sftlf_filename_template: sftlf_%(model_version).nc \n", + " generate_sftlf: True \n", + " regions_specs: {'NHEX': {'domain': {'latitude': (30.0, 90)}}, 'SHEX': {'domain': {'latitude': (-90.0, -30)}}, 'TROPICS': {'domain': {'latitude': (-30.0, 30)}}, 'global': {}, '90S50S': {'domain': {'latitude': (-90.0, -50)}}, '50S20S': {'domain': {'latitude': (-50.0, -20)}}, '20S20N': {'domain': {'latitude': (-20.0, 20)}}, '20N50N': {'domain': {'latitude': (20.0, 50)}}, '50N90N': {'domain': {'latitude': (50.0, 90)}}, 'land_NHEX': {'value': 100, 'domain': {'latitude': (30.0, 90)}}, 'land_SHEX': {'value': 100, 'domain': {'latitude': (-90.0, -30)}}, 'land_TROPICS': {'value': 100, 'domain': {'latitude': 
(-30.0, 30)}}, 'land': {'value': 100}, 'ocean_NHEX': {'value': 0, 'domain': {'latitude': (30.0, 90)}}, 'ocean_SHEX': {'value': 0, 'domain': {'latitude': (-90.0, -30)}}, 'ocean_TROPICS': {'value': 0, 'domain': {'latitude': (30.0, 30)}}, 'ocean': {'value': 0}, 'CONUS': {'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'land_CONUS': {'value': 100, 'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'NAM': {'domain': {'latitude': (20.0, 90), 'longitude': (-180, 180)}}, 'NAO': {'domain': {'latitude': (20.0, 80), 'longitude': (-90, 40)}}, 'SAM': {'domain': {'latitude': (-20.0, -90), 'longitude': (0, 360)}}, 'PNA': {'domain': {'latitude': (20.0, 85), 'longitude': (120, 240)}}, 'PDO': {'domain': {'latitude': (20.0, 70), 'longitude': (110, 260)}}, 'AllMW': {'domain': {'latitude': (-40.0, 45.0), 'longitude': (0.0, 360.0)}}, 'AllM': {'domain': {'latitude': (-45.0, 45.0), 'longitude': (0.0, 360.0)}}, 'NAMM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (210.0, 310.0)}}, 'SAMM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (240.0, 330.0)}}, 'NAFM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (310.0, 60.0)}}, 'SAFM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (0.0, 90.0)}}, 'ASM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (60.0, 180.0)}}, 'AUSM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (90.0, 160.0)}}, 'AIR': {'domain': {'latitude': (7.0, 25.0), 'longitude': (65.0, 85.0)}}, 'AUS': {'domain': {'latitude': (-20.0, -10.0), 'longitude': (120.0, 150.0)}}, 'Sahel': {'domain': {'latitude': (13.0, 18.0), 'longitude': (-10.0, 10.0)}}, 'GoG': {'domain': {'latitude': (0.0, 5.0), 'longitude': (-10.0, 10.0)}}, 'NAmo': {'domain': {'latitude': (20.0, 37.0), 'longitude': (-112.0, -103.0)}}, 'SAmo': {'domain': {'latitude': (-20.0, 2.5), 'longitude': (-65.0, -40.0)}}} \n", + " regions: {'rlut': ['Global']} \n", + " test_data_path: demo_data/CMIP5_demo_clims/ \n", + " reference_data_path: demo_data/obs4MIPs_PCMDI_clims \n", + " metrics_output_path: demo_output/Ex3 \n", + " debug: False \n", + "\n", + "--- prepare mean climate metrics calculation ---\n", + "--- start mean climate metrics calculation ---\n", + "varname: rlut\n", + "level: None\n", + "reference_data_set (all): ['alternate1', 'default']\n", + "ref: alternate1\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-0/v20210804/rlut_mon_CERES-EBAF-4-0_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "ref: default\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-1/v20210804/rlut_mon_CERES-EBAF-4-1_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "pmp mean clim driver completed\n" ] } ], @@ -1199,53 +1489,175 @@ "metadata": {}, "source": [ "### Using custom regions \n", - "This example specifies additional regions for the analysis. The predefined regions that can be set by the `--regions` flag can be found in [default_regions.py](https://github.com/PCMDI/pcmdi_metrics/blob/master/share/default_regions.py). By default, the mean climate driver will run \"Global\", \"NHEX\", \"SHEX\", and \"Tropics\"." + "This example specifies additional regions for the analysis. The predefined regions that can be set by the `--regions` flag can be found in [default_regions.py](https://github.com/PCMDI/pcmdi_metrics/blob/master/share/default_regions.py). By default, the mean climate driver will run \"global\", \"NHEX\", \"SHEX\", and \"TROPICS\"." 
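To make the `--regions` selection described in the markdown above concrete, here is a minimal parameter-file sketch; the region names `NHEX` and `TROPICS` are only examples taken from default_regions.py, and expressing the choice through a `regions` variable is an assumption based on the parameter-file form used later in this notebook:

    # Restrict the rlut analysis to two of the predefined regions
    # (any name listed in share/default_regions.py could be used instead).
    regions = {"rlut": ["NHEX", "TROPICS"]}

The equivalent command-line form would be `--regions '{"rlut": ["NHEX", "TROPICS"]}'`, matching the style of the driver calls shown in these cells.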
] }, { "cell_type": "code", - "execution_count": 7, - "metadata": {}, + "execution_count": 2, + "metadata": { + "scrolled": true, + "tags": [] + }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex4:: REGION: land\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex4:: alternate1 is an obs\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex4:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex4:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex4:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex4:: Auto generating sftlf for model /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex4:: Auto generated sftlf for model ACCESS1-0\n", - "/opt/anaconda3/envs/pcmdi_metrics_dev/lib/python3.9/site-packages/numpy/ma/core.py:1015: RuntimeWarning: overflow encountered in multiply\n", - " result = self.f(da, db, *args, **kwargs)\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: CanCM4 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Auto generating sftlf for model /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Auto generated sftlf for model CanCM4\n", - "/opt/anaconda3/envs/pcmdi_metrics_dev/lib/python3.9/site-packages/numpy/ma/core.py:1015: RuntimeWarning: overflow encountered in multiply\n", - " result = self.f(da, db, *args, **kwargs)\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: default is an obs\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 
17:04::pcmdi_metrics::Ex4:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: CanCM4 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_linear_metrics.json\n" + "2023-02-21 10:40:26,762 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 10:40::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 10:40:48,167 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 10:40:48,190 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 10:41::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 10:41:09,002 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 10:41:10,601 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 10:41::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 10:41:37,682 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 10:41:37,700 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 10:41::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 10:41:58,764 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "INFO::2023-02-21 10:41::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_metrics.json\n", + "2023-02-21 10:41:58,770 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_metrics.json\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "case_id: Ex4 \n", + " test_data_set: ['ACCESS1-0', 'CanCM4'] \n", + " realization: \n", + " vars: ['rlut'] \n", + " reference_data_set: ['all'] \n", + " target_grid: 2.5x2.5 \n", + " regrid_tool: regrid2 \n", + " regrid_tool_ocn: esmf \n", + " save_test_clims: False \n", + " test_clims_interpolated_output: None \n", + " filename_template: cmip5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20200426.nc \n", + " sftlf_filename_template: sftlf_%(model_version).nc \n", + " generate_sftlf: True \n", + " regions_specs: {'MyDomain': {'domain': {'latitude': (20.0, 30)}}} \n", + " regions: {'rlut': ['MyDomain']} \n", + " test_data_path: demo_data/CMIP5_demo_clims/ \n", + " reference_data_path: demo_data/obs4MIPs_PCMDI_clims \n", + " metrics_output_path: demo_output/Ex4 \n", + " debug: False \n", + "\n", + "--- prepare mean climate metrics calculation ---\n", + "--- start mean climate metrics calculation ---\n", + "varname: rlut\n", + "level: None\n", + "reference_data_set (all): ['alternate1', 'default']\n", + "ref: alternate1\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-0/v20210804/rlut_mon_CERES-EBAF-4-0_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: MyDomain\n", + "spatial subset done\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: MyDomain\n", + "spatial subset done\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "ref: default\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-1/v20210804/rlut_mon_CERES-EBAF-4-1_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: MyDomain\n", + "spatial subset done\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: MyDomain\n", + "spatial subset done\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "pmp mean clim driver completed\n" ] } ], "source": [ "%%bash\n", - "mean_climate_driver.py -p basic_param.py --case_id 'Ex4' --regions '{\"rlut\": [\"land\"]}'" + "mean_climate_driver.py -p basic_param.py \\\n", + "--case_id 'Ex4' --regions '{\"rlut\": [\"MyDomain\"]}' \\\n", + "--regions_specs \"{'MyDomain': {'domain': {'latitude': (20.0, 30)}}}\"" ] }, { @@ -1259,7 +1671,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "It is not currently possible to edit the region definitions from the command line. This is controlled by the variable `regions_specs` in the parameter file. For example, a custom region for Antarctica could be defined with `regions_specs = {'ANT': {'value': 100, 'domain': cdutil.region.domain(latitude=(-60, -90))}}` in the parameter file. \n", + "Custom domain is controlled by the variable `regions_specs` in the parameter file or command line. For example, a custom region for Antarctica over land could be defined with `regions_specs = {'ANT': {'value': 100, 'domain': {'latitude': (-90, -60)}}}` in the parameter file. `value` indicate masking, 100: land only, 0: ocean only.\n", "\n", "The command to use the custom region would look like `--regions '{\"rlut\": [\"ANT\"]}'` in the command line or `regions = {\"rlut\": [\"ANT\"]}` in the parameter file." 
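For reference, a minimal parameter-file sketch of the custom-domain setup described in the markdown above; the file name `ant_param.py` is only a placeholder, while the `regions_specs` and `regions` entries mirror the Antarctica example given there:

    # ant_param.py -- illustrative sketch only, not shipped with the demo
    # 'value' selects the land/sea masking applied within the domain:
    # 100 keeps land-only points, 0 keeps ocean-only points.
    regions_specs = {
        "ANT": {"value": 100, "domain": {"latitude": (-90, -60)}},
    }

    # Compute the rlut metrics over the custom "ANT" region only.
    regions = {"rlut": ["ANT"]}

Passing the same settings on the command line, in the style of the Ex4 cell above, would look like `--regions '{"rlut": ["ANT"]}'` together with a matching `--regions_specs` string.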
] @@ -1287,51 +1699,140 @@ { "cell_type": "code", "execution_count": 8, - "metadata": {}, + "metadata": { + "scrolled": true, + "tags": [] + }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: REGION: global\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: alternate1 is an obs\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: CanCM4 is a model\n", - "/opt/anaconda3/envs/pcmdi_metrics_dev/lib/python3.9/site-packages/cdms2/fvariable.py:103: FutureWarning: Using a non-tuple sequence for multidimensional indexing is deprecated; use `arr[tuple(seq)]` instead of `arr[seq]`. In the future this will be interpreted as an array index, `arr[np.array(seq)]`, which will result either in an error or a different result.\n", - " result = result[revlist]\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg-500_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg-500_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: REGION: NHEX\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: alternate1 is an obs\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: CanCM4 is a model\n", - "/opt/anaconda3/envs/pcmdi_metrics_dev/lib/python3.9/site-packages/cdms2/fvariable.py:103: FutureWarning: Using a non-tuple sequence for multidimensional indexing is deprecated; use `arr[tuple(seq)]` instead of `arr[seq]`. 
In the future this will be interpreted as an array index, `arr[np.array(seq)]`, which will result either in an error or a different result.\n", - " result = result[revlist]\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg-500_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg-500_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: REGION: SHEX\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: alternate1 is an obs\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: CanCM4 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg-500_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg-500_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: REGION: TROPICS\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: alternate1 is an obs\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: CanCM4 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg-500_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg-500_2.5x2.5_regrid2_linear_metrics.json\n" + "2023-02-20 09:03:54,726 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-20 09:05::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg_500/CanCM4_zg_500_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-20 09:05:24,208 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg_500/CanCM4_zg_500_2.5x2.5_regrid2_metrics_alternate1.json\n", + "INFO::2023-02-20 09:05::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg_500_2.5x2.5_regrid2_metrics.json\n", + "2023-02-20 09:05:33,596 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg_500_2.5x2.5_regrid2_metrics.json\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "case_id: Ex6 \n", + " test_data_set: ['CanCM4'] \n", + " realization: \n", + " vars: ['zg_500'] \n", + " reference_data_set: ['alternate1'] \n", + " target_grid: 2.5x2.5 \n", + " regrid_tool: regrid2 \n", + " regrid_tool_ocn: esmf \n", + " save_test_clims: False \n", + " test_clims_interpolated_output: None \n", + " filename_template: cmip5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20200426.nc \n", + " sftlf_filename_template: sftlf_%(model_version).nc \n", + " generate_sftlf: True \n", + " regions_specs: {'NHEX': {'domain': {'latitude': (30.0, 90)}}, 'SHEX': {'domain': {'latitude': (-90.0, -30)}}, 'TROPICS': {'domain': {'latitude': (-30.0, 30)}}, 'global': {}, '90S50S': {'domain': {'latitude': (-90.0, -50)}}, '50S20S': {'domain': {'latitude': (-50.0, -20)}}, '20S20N': {'domain': {'latitude': (-20.0, 20)}}, '20N50N': {'domain': {'latitude': (20.0, 50)}}, '50N90N': {'domain': {'latitude': (50.0, 90)}}, 'land_NHEX': {'value': 100, 'domain': {'latitude': (30.0, 90)}}, 'land_SHEX': {'value': 100, 'domain': {'latitude': (-90.0, -30)}}, 'land_TROPICS': {'value': 100, 'domain': {'latitude': (-30.0, 30)}}, 'land': {'value': 100}, 'ocean_NHEX': {'value': 0, 'domain': {'latitude': (30.0, 90)}}, 'ocean_SHEX': {'value': 0, 'domain': {'latitude': (-90.0, -30)}}, 'ocean_TROPICS': {'value': 0, 'domain': {'latitude': (30.0, 30)}}, 'ocean': {'value': 0}, 'CONUS': {'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'land_CONUS': {'value': 100, 'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'NAM': {'domain': {'latitude': (20.0, 90), 'longitude': (-180, 180)}}, 'NAO': {'domain': {'latitude': (20.0, 80), 'longitude': (-90, 40)}}, 'SAM': {'domain': {'latitude': (-20.0, -90), 'longitude': (0, 360)}}, 'PNA': {'domain': {'latitude': (20.0, 85), 'longitude': (120, 240)}}, 'PDO': {'domain': {'latitude': (20.0, 70), 'longitude': (110, 260)}}, 'AllMW': {'domain': {'latitude': (-40.0, 45.0), 'longitude': (0.0, 360.0)}}, 'AllM': {'domain': {'latitude': (-45.0, 45.0), 'longitude': (0.0, 360.0)}}, 'NAMM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (210.0, 310.0)}}, 'SAMM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (240.0, 330.0)}}, 'NAFM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (310.0, 60.0)}}, 'SAFM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (0.0, 90.0)}}, 'ASM': {'domain': 
{'latitude': (0.0, 45.0), 'longitude': (60.0, 180.0)}}, 'AUSM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (90.0, 160.0)}}, 'AIR': {'domain': {'latitude': (7.0, 25.0), 'longitude': (65.0, 85.0)}}, 'AUS': {'domain': {'latitude': (-20.0, -10.0), 'longitude': (120.0, 150.0)}}, 'Sahel': {'domain': {'latitude': (13.0, 18.0), 'longitude': (-10.0, 10.0)}}, 'GoG': {'domain': {'latitude': (0.0, 5.0), 'longitude': (-10.0, 10.0)}}, 'NAmo': {'domain': {'latitude': (20.0, 37.0), 'longitude': (-112.0, -103.0)}}, 'SAmo': {'domain': {'latitude': (-20.0, 2.5), 'longitude': (-65.0, -40.0)}}} \n", + " regions: {'rlut': ['Global']} \n", + " test_data_path: demo_data/CMIP5_demo_clims/ \n", + " reference_data_path: demo_data/obs4MIPs_PCMDI_clims \n", + " metrics_output_path: demo_output/Ex6 \n", + " debug: False \n", + "\n", + "--- prepare mean climate metrics calculation ---\n", + "--- start mean climate metrics calculation ---\n", + "varname: zg\n", + "level: 500.0\n", + "ref: alternate1\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/zg/ERA-INT/v20210804/zg_mon_ERA-INT_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.zg.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: global\n", + "compute metrics start\n", + "var: zg\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "region: NHEX\n", + "spatial subset done\n", + "compute metrics start\n", + "var: zg\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "region: SHEX\n", + "spatial subset done\n", + "compute metrics start\n", + "var: zg\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", 
+ "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "region: TROPICS\n", + "spatial subset done\n", + "compute metrics start\n", + "var: zg\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "pmp mean clim driver completed\n" ] } ], "source": [ "%%bash\n", "mean_climate_driver.py -p basic_param.py \\\n", - "--case_id 'Ex6' \\\n", + "--case_id 'Ex5' \\\n", "--vars 'zg_500' \\\n", "--test_data_set 'CanCM4' \\\n", "--reference_data_set \"alternate1\"" @@ -1363,37 +1864,155 @@ "name": "stderr", "output_type": "stream", "text": [ - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: REGION: Global\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: alternate1 is an obs\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Saving interpolated climatologies to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/Global\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Results saved to a nc file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/Global.nc\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: CanCM4 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Saving interpolated climatologies to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/Global\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Results saved to a nc file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/Global.nc\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Saving results to: 
/Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: default is an obs\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: CanCM4 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_linear_metrics.json\n" + "2023-02-20 09:05:39,761 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-20 09:06::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-20 09:06:08,881 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-20 09:06:08,903 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-20 09:06::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-20 09:06:42,569 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-20 09:06:44,191 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-20 09:07::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-20 09:07:13,173 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-20 09:07:13,196 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-20 09:07::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-20 09:07:43,353 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "INFO::2023-02-20 09:07::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_metrics.json\n", + "2023-02-20 09:07:53,368 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_metrics.json\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "case_id: Ex5 \n", + " test_data_set: ['ACCESS1-0', 'CanCM4'] \n", + " realization: \n", + " vars: ['rlut'] \n", + " reference_data_set: ['all'] \n", + " target_grid: 2.5x2.5 \n", + " regrid_tool: regrid2 \n", + " regrid_tool_ocn: esmf \n", + " save_test_clims: True \n", + " test_clims_interpolated_output: None \n", + " filename_template: cmip5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20200426.nc \n", + " sftlf_filename_template: sftlf_%(model_version).nc \n", + " generate_sftlf: True \n", + " regions_specs: {'NHEX': {'domain': {'latitude': (30.0, 90)}}, 'SHEX': {'domain': {'latitude': (-90.0, -30)}}, 'TROPICS': {'domain': {'latitude': (-30.0, 30)}}, 'global': {}, '90S50S': {'domain': {'latitude': (-90.0, -50)}}, '50S20S': {'domain': {'latitude': (-50.0, -20)}}, '20S20N': {'domain': {'latitude': (-20.0, 20)}}, '20N50N': {'domain': {'latitude': (20.0, 50)}}, '50N90N': {'domain': {'latitude': (50.0, 90)}}, 'land_NHEX': {'value': 100, 'domain': {'latitude': (30.0, 90)}}, 'land_SHEX': {'value': 100, 'domain': {'latitude': (-90.0, -30)}}, 'land_TROPICS': {'value': 100, 'domain': {'latitude': (-30.0, 30)}}, 'land': {'value': 100}, 'ocean_NHEX': {'value': 0, 'domain': {'latitude': (30.0, 90)}}, 'ocean_SHEX': {'value': 0, 'domain': {'latitude': (-90.0, -30)}}, 'ocean_TROPICS': {'value': 0, 'domain': {'latitude': (30.0, 30)}}, 'ocean': {'value': 0}, 'CONUS': {'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'land_CONUS': {'value': 100, 'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'NAM': {'domain': {'latitude': (20.0, 90), 'longitude': (-180, 180)}}, 'NAO': {'domain': 
{'latitude': (20.0, 80), 'longitude': (-90, 40)}}, 'SAM': {'domain': {'latitude': (-20.0, -90), 'longitude': (0, 360)}}, 'PNA': {'domain': {'latitude': (20.0, 85), 'longitude': (120, 240)}}, 'PDO': {'domain': {'latitude': (20.0, 70), 'longitude': (110, 260)}}, 'AllMW': {'domain': {'latitude': (-40.0, 45.0), 'longitude': (0.0, 360.0)}}, 'AllM': {'domain': {'latitude': (-45.0, 45.0), 'longitude': (0.0, 360.0)}}, 'NAMM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (210.0, 310.0)}}, 'SAMM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (240.0, 330.0)}}, 'NAFM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (310.0, 60.0)}}, 'SAFM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (0.0, 90.0)}}, 'ASM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (60.0, 180.0)}}, 'AUSM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (90.0, 160.0)}}, 'AIR': {'domain': {'latitude': (7.0, 25.0), 'longitude': (65.0, 85.0)}}, 'AUS': {'domain': {'latitude': (-20.0, -10.0), 'longitude': (120.0, 150.0)}}, 'Sahel': {'domain': {'latitude': (13.0, 18.0), 'longitude': (-10.0, 10.0)}}, 'GoG': {'domain': {'latitude': (0.0, 5.0), 'longitude': (-10.0, 10.0)}}, 'NAmo': {'domain': {'latitude': (20.0, 37.0), 'longitude': (-112.0, -103.0)}}, 'SAmo': {'domain': {'latitude': (-20.0, 2.5), 'longitude': (-65.0, -40.0)}}} \n", + " regions: {'rlut': ['Global']} \n", + " test_data_path: demo_data/CMIP5_demo_clims/ \n", + " reference_data_path: demo_data/obs4MIPs_PCMDI_clims \n", + " metrics_output_path: demo_output/Ex5 \n", + " debug: False \n", + "\n", + "--- prepare mean climate metrics calculation ---\n", + "--- start mean climate metrics calculation ---\n", + "varname: rlut\n", + "level: None\n", + "reference_data_set (all): ['alternate1', 'default']\n", + "ref: alternate1\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-0/v20210804/rlut_mon_CERES-EBAF-4-0_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "ref: default\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-1/v20210804/rlut_mon_CERES-EBAF-4-1_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "pmp mean clim driver completed\n" ] } ], "source": [ "%%bash \n", - "mean_climate_driver.py -p basic_param.py --case_id 'Ex5' --user_notes 'Example note' --save_test_clims True" + "mean_climate_driver.py -p basic_param.py \\\n", + "--case_id 'Ex6' --user_notes 'Example note' --save_test_clims True" ] }, { @@ -1411,9 +2030,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python [conda env:pmp_devel_20230218] *", "language": "python", - "name": "python3" + "name": "conda-env-pmp_devel_20230218-py" }, "language_info": { "codemirror_mode": { From b21f74f49a9d56bc06be3d15a04743ce8421c84a Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Tue, 21 Feb 2023 13:30:28 -0800 Subject: [PATCH 122/130] bug fix --- .../mean_climate/lib/compute_metrics.py | 35 ++++++++++++------- .../lib/create_mean_climate_parser.py | 9 +++++ .../mean_climate/lib/load_and_regrid.py | 35 +++++++++++++++++-- .../lib/mean_climate_metrics_to_json.py | 2 +- .../mean_climate/mean_climate_driver.py | 22 +++++------- 5 files changed, 74 insertions(+), 29 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/compute_metrics.py b/pcmdi_metrics/mean_climate/lib/compute_metrics.py index 1b85f5723..7d1e842f3 100644 --- a/pcmdi_metrics/mean_climate/lib/compute_metrics.py +++ b/pcmdi_metrics/mean_climate/lib/compute_metrics.py @@ -1,14 +1,14 @@ -import collections +from collections import OrderedDict import pcmdi_metrics -def compute_metrics(Var, dm, do): +def compute_metrics(Var, dm, do, debug=False): # Var is sometimes sent with level associated var = Var.split("_")[0] # Did we send data? Or do we just want the info? if dm is None and do is None: - metrics_defs = collections.OrderedDict() + metrics_defs = OrderedDict() metrics_defs["rms_xyt"] = pcmdi_metrics.mean_climate.lib.rms_xyt(None, None) metrics_defs["rms_xy"] = pcmdi_metrics.mean_climate.lib.rms_xy(None, None) metrics_defs["rmsc_xy"] = pcmdi_metrics.mean_climate.lib.rmsc_xy(None, None) @@ -31,15 +31,26 @@ def compute_metrics(Var, dm, do): # cdms.setAutoBounds("on") print('var: ', var) + # unify time and time bounds between observation and model + if debug: + print('before time and time bounds unifying') + print('dm.time: ', dm['time']) + print('do.time: ', do['time']) + # Below is temporary... 
- do['time'] = dm['time'] - do['time_bnds'] = dm['time_bnds'] - print('do.time: ', do['time']) + dm['time'] = do['time'] + dm[dm.time.attrs['bounds']] = do[do.time.attrs['bounds']] + + if debug: + print('after time and time bounds unifying') + print('dm.time: ', dm['time']) + print('do.time: ', do['time']) - # dm.to_netcdf('dm.nc') - # do.to_netcdf('do.nc') + #if debug: + # dm.to_netcdf('dm.nc') + # do.to_netcdf('do.nc') - metrics_dictionary = {} + metrics_dictionary = OrderedDict() # SET CONDITIONAL ON INPUT VARIABLE if var == "pr": @@ -114,7 +125,7 @@ def compute_metrics(Var, dm, do): stdObs_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(do_am_devzm, var, weights=do.spatial.get_weights(axis=['X', 'Y'])) std_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(dm_am_devzm, var, weights=dm.spatial.get_weights(axis=['X', 'Y'])) - for stat in [ + for stat in sorted([ "std-obs_xy", "std_xy", "std-obs_xyt", @@ -131,8 +142,8 @@ def compute_metrics(Var, dm, do): "mae_xy", "rms_y", "rms_devzm", - ]: - metrics_dictionary[stat] = {} + ]): + metrics_dictionary[stat] = OrderedDict() metrics_dictionary["mean-obs_xy"]["ann"] = format(meanObs_xy * conv, sig_digits) metrics_dictionary["mean_xy"]["ann"] = format(mean_xy * conv, sig_digits) diff --git a/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py b/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py index 937ed95b3..85dbf3742 100644 --- a/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py +++ b/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py @@ -39,6 +39,15 @@ def create_mean_climate_parser(): help="Users can customize regions values names", required=False, ) + + parser.add_argument( + "--regions_specs", + type=ast.literal_eval, + dest="regions_specs", + help="Users can customize regions", + default=None, + required=False, + ) parser.add_argument( "-r", diff --git a/pcmdi_metrics/mean_climate/lib/load_and_regrid.py b/pcmdi_metrics/mean_climate/lib/load_and_regrid.py index 356d7301e..2e1231e1a 100644 --- a/pcmdi_metrics/mean_climate/lib/load_and_regrid.py +++ b/pcmdi_metrics/mean_climate/lib/load_and_regrid.py @@ -1,6 +1,7 @@ from pcmdi_metrics.io import xcdat_open import cftime import xcdat as xc +import numpy as np def load_and_regrid(data_path, varname, level=None, t_grid=None, decode_times=True, regrid_tool='regrid2', debug=False): """Load data and regrid to target grid @@ -14,20 +15,48 @@ def load_and_regrid(data_path, varname, level=None, t_grid=None, decode_times=Tr regrid_tool (str): Name of the regridding tool. See https://xcdat.readthedocs.io/en/stable/generated/xarray.Dataset.regridder.horizontal.html for more info debug (bool): Default is False. If True, print more info to help debugging process """ + if debug: + print('load_and_regrid start') + # load data ds = xcdat_open(data_path, data_var=varname, decode_times=decode_times) # NOTE: decode_times=False will be removed once obs4MIP written using xcdat + + # calendar quality check + if "calendar" in list(ds.time.attrs.keys()): + if debug: + print('ds.time.attrs["calendar"]:', ds.time.attrs["calendar"]) + if 'calendar' in ds.attrs.keys(): + if debug: + print('ds.calendar:', ds.calendar) + if ds.calendar != ds.time.attrs["calendar"]: + print('[WARNING]: calendar info mismatch. 
ds.time.attrs["calendar"] is adjusted to ds.calendar') + ds.time.attrs["calendar"] = ds.calendar + else: + if 'calendar' in ds.attrs.keys(): + ds.time.attrs["calendar"] = ds.calendar + # time bound check -- add proper time bound info if cdms-generated annual cycle is loaded - if not isinstance(ds.time.values[0], cftime._cftime.DatetimeProlepticGregorian) and "units" not in list(ds.time.attrs.keys()): + if isinstance(ds.time.values[0], np.float64): # and "units" not in list(ds.time.attrs.keys()): ds.time.attrs['units'] = "days since 0001-01-01" ds = xc.decode_time(ds) - # level + if debug: + print('decode_time done') + + # level - extract a specific level if needed if level is not None: level = level * 100 # hPa to Pa ds = ds.sel(plev=level) if debug: print('ds:', ds) + # regrid - ds_regridded = ds.regridder.horizontal(varname, t_grid, tool=regrid_tool) + if regrid_tool == 'regrid2': + ds_regridded = ds.regridder.horizontal(varname, t_grid, tool=regrid_tool) + elif regrid_tool in ['esmf', 'xesmf']: + regrid_tool = 'xesmf' + regrid_method = 'bilinear' + ds_regridded = ds.regridder.horizontal(varname, t_grid, tool=regrid_tool, method=regrid_method) + if debug: print('ds_regridded:', ds_regridded) return ds_regridded diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py index baafc8137..b614f3559 100644 --- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py @@ -41,7 +41,7 @@ def mean_climate_metrics_to_json( indent=4, separators=(",", ": "), mode="r+", - sort_keys=True, + sort_keys=False, ) if debug: diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py index dd4627f18..a40e22048 100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver.py @@ -4,6 +4,7 @@ import json import os from re import split +from collections import OrderedDict import cdms2 import cdutil @@ -22,13 +23,7 @@ parser = create_mean_climate_parser() -#parameter = parser.get_parameter(cmd_default_vars=False, argparse_vals_only=False) -#parameter = parser.get_parameter(cmd_default_vars=True, argparse_vals_only=False) parameter = parser.get_parameter(argparse_vals_only=False) -#parameter = parser.get_parameter() -#print(parameter) -#import sys -#sys.exit('test') # parameters case_id = parameter.case_id @@ -63,7 +58,10 @@ else: realizations = [realization] -if not bool(regions_specs): +if debug: + print('regions_specs (before loading internally defined):', regions_specs) + +if regions_specs is None or not bool(regions_specs): regions_specs = load_regions_specs() default_regions = ['global', 'NHEX', 'SHEX', 'TROPICS'] @@ -159,7 +157,7 @@ print('ref_data_full_path:', ref_data_full_path) # load data and regrid ds_ref = load_and_regrid(ref_data_full_path, varname, level, t_grid, decode_times=False, regrid_tool=regrid_tool, debug=debug) - ds_ref_dict = dict() + ds_ref_dict = OrderedDict() # ---------- # model loop @@ -188,7 +186,7 @@ print('test_data (model in this case) full_path:', test_data_full_path) #try: if 1: - ds_test_dict = dict() + ds_test_dict = OrderedDict() # load data and regrid ds_test = load_and_regrid(test_data_full_path, varname, level, t_grid, decode_times=True, regrid_tool=regrid_tool, debug=debug) @@ -225,7 +223,6 @@ ds_test_dict[region] = ds_test_tmp if region not in list(ds_ref_dict.keys()): ds_ref_dict[region] = 
region_subset(ds_ref_tmp, regions_specs, region=region) - print('spatial subset done') if debug: @@ -235,7 +232,7 @@ # compute metrics print('compute metrics start') - result_dict["RESULTS"][model][ref][run][region] = compute_metrics(varname, ds_test_dict[region], ds_ref_dict[region]) + result_dict["RESULTS"][model][ref][run][region] = compute_metrics(varname, ds_test_dict[region], ds_ref_dict[region], debug=debug) # write individual JSON # --- single simulation, obs (need to accumulate later) / single variable @@ -254,7 +251,6 @@ print('error occured for ', model, run) print(e) """ - """ # write collective JSON --- all models / all obs / single variable json_filename = "_".join([var, target_grid, regrid_tool, "metrics"]) mean_climate_metrics_to_json( @@ -263,4 +259,4 @@ result_dict, cmec_flag=cmec, ) - """ + print('pmp mean clim driver completed') From 03364d00af0f1e86dc6f68af3f19f4c4c7003b1e Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Tue, 21 Feb 2023 13:30:49 -0800 Subject: [PATCH 123/130] clean up --- share/DefArgsCIA.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/share/DefArgsCIA.json b/share/DefArgsCIA.json index 8507f33ba..cd33a055d 100644 --- a/share/DefArgsCIA.json +++ b/share/DefArgsCIA.json @@ -163,4 +163,4 @@ ], "help":"A list of variables to be processed" } -} +} \ No newline at end of file From 80c72c9f0bf7d3f95f689b81798e75fa9634b3c2 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Tue, 21 Feb 2023 13:31:23 -0800 Subject: [PATCH 124/130] clean up --- share/DefArgsCIA.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/share/DefArgsCIA.json b/share/DefArgsCIA.json index cd33a055d..8507f33ba 100644 --- a/share/DefArgsCIA.json +++ b/share/DefArgsCIA.json @@ -163,4 +163,4 @@ ], "help":"A list of variables to be processed" } -} \ No newline at end of file +} From 72aa81cad52f86d7ca93b9c9f4951377c89c0439 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Tue, 21 Feb 2023 14:18:02 -0800 Subject: [PATCH 125/130] remove xesmf from dependency -- make it as an optional... 
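Since this commit removes xesmf from the pinned development environment, one common way to keep it usable as an optional dependency — sketched here only as an illustration, not necessarily what the package itself does — is a guarded, lazy import that fails with a clear message only when the 'xesmf' regrid tool is actually requested:

```python
def require_xesmf():
    """Return the xesmf module, or raise a clear error if it is not installed (sketch)."""
    try:
        import xesmf  # optional dependency, only needed for regrid_tool='xesmf'
    except ImportError as exc:
        raise ImportError(
            "regrid_tool='xesmf' needs the optional dependency xesmf; "
            "install it, e.g. with `conda install -c conda-forge xesmf`."
        ) from exc
    return xesmf
```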
--- conda-env/dev.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/conda-env/dev.yml b/conda-env/dev.yml index b32d178e6..c8090fc75 100644 --- a/conda-env/dev.yml +++ b/conda-env/dev.yml @@ -26,7 +26,6 @@ dependencies: - regionmask=0.9.0 - rasterio=1.2.10 - shapely=1.8.0 - - xesmf=0.7.0 # Testing # ================== - pre_commit=2.20.0 From ac5250a23024865a97cb016bd1da45b5b5d87a20 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Tue, 21 Feb 2023 14:18:46 -0800 Subject: [PATCH 126/130] change import path for pmp_parser and/or PMPParser --- pcmdi_metrics/mjo/mjo_metrics_driver.py | 5 ++++- .../variability_across_timescales_PS_driver.py | 2 +- pcmdi_metrics/variability_mode/variability_modes_driver.py | 4 +++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/pcmdi_metrics/mjo/mjo_metrics_driver.py b/pcmdi_metrics/mjo/mjo_metrics_driver.py index ed3a09ba5..b95bb8187 100755 --- a/pcmdi_metrics/mjo/mjo_metrics_driver.py +++ b/pcmdi_metrics/mjo/mjo_metrics_driver.py @@ -51,6 +51,9 @@ mjo_metrics_to_json, ) +from pcmdi_metrics.mean_climate.lib import pmp_parser + + # To avoid below error # OpenBLAS blas_thread_init: pthread_create failed for thread XX of 96: Resource temporarily unavailable # os.environ['OPENBLAS_NUM_THREADS'] = '1' @@ -71,7 +74,7 @@ # ================================================= # Collect user defined options # ------------------------------------------------- -P = pcmdi_metrics.driver.pmp_parser.PMPParser( +P = pmp_parser.PMPParser( description="Runs PCMDI MJO Computations", formatter_class=RawTextHelpFormatter ) P = AddParserArgument(P) diff --git a/pcmdi_metrics/precip_variability/variability_across_timescales_PS_driver.py b/pcmdi_metrics/precip_variability/variability_across_timescales_PS_driver.py index bf2af43ca..046746438 100644 --- a/pcmdi_metrics/precip_variability/variability_across_timescales_PS_driver.py +++ b/pcmdi_metrics/precip_variability/variability_across_timescales_PS_driver.py @@ -5,7 +5,7 @@ from genutil import StringConstructor -from pcmdi_metrics.driver.pmp_parser import PMPParser +from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPParser from pcmdi_metrics.precip_variability.lib import ( AddParserArgument, precip_variability_across_timescale, diff --git a/pcmdi_metrics/variability_mode/variability_modes_driver.py b/pcmdi_metrics/variability_mode/variability_modes_driver.py index 318db1bfa..a94ac6d0d 100755 --- a/pcmdi_metrics/variability_mode/variability_modes_driver.py +++ b/pcmdi_metrics/variability_mode/variability_modes_driver.py @@ -83,6 +83,8 @@ variability_metrics_to_json, write_nc_output, ) +from pcmdi_metrics.mean_climate.lib import pmp_parser + # To avoid below error # OpenBLAS blas_thread_init: pthread_create failed for thread XX of 96: Resource temporarily unavailable @@ -106,7 +108,7 @@ # ================================================= # Collect user defined options # ------------------------------------------------- -P = pcmdi_metrics.driver.pmp_parser.PMPParser( +P = pmp_parser.PMPParser( description="Runs PCMDI Modes of Variability Computations", formatter_class=RawTextHelpFormatter, ) From 4cbdac81e2e3de2b7fb6ca265cceb7b4dd8d6e2d Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Tue, 21 Feb 2023 14:19:02 -0800 Subject: [PATCH 127/130] clean up --- pcmdi_metrics/mean_climate/mean_climate_driver.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py index a40e22048..b1c319e35 100755 --- 
a/pcmdi_metrics/mean_climate/mean_climate_driver.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver.py @@ -184,8 +184,7 @@ print('-----------------------') print('model, run:', model, run) print('test_data (model in this case) full_path:', test_data_full_path) - #try: - if 1: + try: ds_test_dict = OrderedDict() # load data and regrid @@ -246,11 +245,11 @@ cmec_flag=cmec, debug=debug ) - """ + except Exception as e: print('error occured for ', model, run) print(e) - """ + # write collective JSON --- all models / all obs / single variable json_filename = "_".join([var, target_grid, regrid_tool, "metrics"]) mean_climate_metrics_to_json( From a83ad538e62c334f0719832b49521271906ba809 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Tue, 21 Feb 2023 14:29:18 -0800 Subject: [PATCH 128/130] update demo --- doc/jupyter/Demo/Demo_1b_mean_climate.ipynb | 330 ++++++++------------ 1 file changed, 122 insertions(+), 208 deletions(-) diff --git a/doc/jupyter/Demo/Demo_1b_mean_climate.ipynb b/doc/jupyter/Demo/Demo_1b_mean_climate.ipynb index 4627d175f..7689c04a9 100644 --- a/doc/jupyter/Demo/Demo_1b_mean_climate.ipynb +++ b/doc/jupyter/Demo/Demo_1b_mean_climate.ipynb @@ -135,20 +135,20 @@ "name": "stderr", "output_type": "stream", "text": [ - "2023-02-20 08:56:23,632 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-20 08:56::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", - "2023-02-20 08:56:48,696 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", - "2023-02-20 08:56:48,718 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-20 08:57::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", - "2023-02-20 08:57:07,713 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", - "2023-02-20 08:57:09,161 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-20 08:57::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", - "2023-02-20 08:57:27,364 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", - "2023-02-20 08:57:27,382 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-20 08:57::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", - "2023-02-20 08:57:46,468 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", - "INFO::2023-02-20 08:57::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_metrics.json\n", - "2023-02-20 08:57:46,471 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_metrics.json\n" + "2023-02-21 14:17:32,585 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:18::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:18:04,318 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:18:04,348 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:18::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:18:32,211 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:18:34,137 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:19::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:19:02,987 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:19:03,017 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:19::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:19:34,352 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "INFO::2023-02-21 14:19::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_metrics.json\n", + "2023-02-21 14:19:34,357 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_metrics.json\n" ] }, { @@ -1201,14 +1201,69 @@ " \n", "This next cell demonstrates how to change 'case_id' and 'regrid_tool' on the command line. Changing the 'case_id' is helpful because these results will be stored in a folder with that name, separate from other runs. \n", "\n", - "The two regrid tools available are 'regrid2' and 'esmf'. 'regrid2' is recommended, but 'esmf' must be used with non-rectangular grids. \n", - " \n", - "Both the model data sets and observations are regridded to a 2.5 by 2.5 degree grid before producing statistics. To interpolate to a different grid, the user should provide a [cdms2 grid object](https://cdms.readthedocs.io/en/latest/manual/cdms_2.html#id9) as the `target_grid`." + "Both the model data sets and observations are regridded to a 2.5 by 2.5 degree grid before producing statistics. To interpolate to a different grid, the user should provide a [cdms2 grid object](https://cdms.readthedocs.io/en/latest/manual/cdms_2.html#id9) as the `target_grid`.\n", + "\n", + "The two regrid tools available are 'regrid2' and ['xesmf'](https://xesmf.readthedocs.io/en/latest/). 'regrid2' is recommended, but 'xesmf' must be used with non-rectangular grids. 
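As a rough illustration of the two tools (not a verbatim excerpt from the demo), a target grid can be built and passed to xcdat's regridder accessor. The 2.5-degree cell centers, the `create_uniform_grid` helper, and the local demo filename below are assumptions:

```python
import xcdat as xc

# Demo annual-cycle climatology file, assumed to be in the working directory.
ds = xc.open_dataset("cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc")

# Uniform 2.5 x 2.5 degree target grid (cell centers assumed at -88.75..88.75, 0..357.5).
t_grid = xc.create_uniform_grid(-88.75, 88.75, 2.5, 0.0, 357.5, 2.5)

# Default tool used by the driver.
rlut_regrid2 = ds.regridder.horizontal("rlut", t_grid, tool="regrid2")

# Optional xesmf backend with bilinear interpolation (needed for non-rectangular source grids).
rlut_xesmf = ds.regridder.horizontal("rlut", t_grid, tool="xesmf", method="bilinear")
```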
" ] }, { "cell_type": "code", "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'\\nimport sys\\n!conda install --yes --prefix {sys.prefix} -c conda-forge xesmf\\n'" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# for conda\n", + "\"\"\"\n", + "import sys\n", + "!conda install --yes --prefix {sys.prefix} -c conda-forge xesmf\n", + "\"\"\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "OR delete the triple quotations on lines 2&5 from this cell to install with pip:" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'\\nimport sys\\n!{sys.executable} -m pip install git+https://github.com/pangeo-data/xesmf.git\\n'" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# for pip\n", + "\"\"\"\n", + "import sys\n", + "!{sys.executable} -m pip install git+https://github.com/pangeo-data/xesmf.git\n", + "\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": 7, "metadata": { "scrolled": true }, @@ -1217,20 +1272,20 @@ "name": "stderr", "output_type": "stream", "text": [ - "2023-02-20 08:57:54,165 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-20 08:58::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/ACCESS1-0_rlut_2.5x2.5_esmf_metrics_alternate1.json\n", - "2023-02-20 08:58:23,679 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/ACCESS1-0_rlut_2.5x2.5_esmf_metrics_alternate1.json\n", - "2023-02-20 08:58:23,703 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-20 08:58::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/CanCM4_rlut_2.5x2.5_esmf_metrics_alternate1.json\n", - "2023-02-20 08:58:56,596 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/CanCM4_rlut_2.5x2.5_esmf_metrics_alternate1.json\n", - "2023-02-20 08:58:59,145 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-20 08:59::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/ACCESS1-0_rlut_2.5x2.5_esmf_metrics_default.json\n", - "2023-02-20 08:59:36,824 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/ACCESS1-0_rlut_2.5x2.5_esmf_metrics_default.json\n", - "2023-02-20 08:59:36,857 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-20 09:00::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/CanCM4_rlut_2.5x2.5_esmf_metrics_default.json\n", - "2023-02-20 09:00:05,953 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/CanCM4_rlut_2.5x2.5_esmf_metrics_default.json\n", - "INFO::2023-02-20 09:00::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_metrics.json\n", - "2023-02-20 09:00:17,404 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_metrics.json\n" + "2023-02-21 14:19:44,197 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:20::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/ACCESS1-0_rlut_2.5x2.5_xesmf_metrics_alternate1.json\n", + "2023-02-21 14:20:31,985 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/ACCESS1-0_rlut_2.5x2.5_xesmf_metrics_alternate1.json\n", + "2023-02-21 14:20:32,017 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:21::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/CanCM4_rlut_2.5x2.5_xesmf_metrics_alternate1.json\n", + "2023-02-21 14:21:11,672 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/CanCM4_rlut_2.5x2.5_xesmf_metrics_alternate1.json\n", + "2023-02-21 14:21:14,865 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:21::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/ACCESS1-0_rlut_2.5x2.5_xesmf_metrics_default.json\n", + "2023-02-21 14:21:57,643 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/ACCESS1-0_rlut_2.5x2.5_xesmf_metrics_default.json\n", + "2023-02-21 14:21:57,670 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:22::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/CanCM4_rlut_2.5x2.5_xesmf_metrics_default.json\n", + "2023-02-21 14:22:36,898 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/CanCM4_rlut_2.5x2.5_xesmf_metrics_default.json\n", + "INFO::2023-02-21 14:22::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_xesmf_metrics.json\n", + "2023-02-21 14:22:47,436 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_xesmf_metrics.json\n" ] }, { @@ -1243,7 +1298,7 @@ " vars: ['rlut'] \n", " reference_data_set: ['all'] \n", " target_grid: 2.5x2.5 \n", - " regrid_tool: esmf \n", + " regrid_tool: xesmf \n", " regrid_tool_ocn: esmf \n", " save_test_clims: False \n", " test_clims_interpolated_output: None \n", @@ -1364,7 +1419,7 @@ ], "source": [ "%%bash\n", - "mean_climate_driver.py -p basic_param.py --case_id 'Ex2' --regrid_tool 'esmf'" + "mean_climate_driver.py -p basic_param.py --case_id 'Ex2' --regrid_tool 'xesmf'" ] }, { @@ -1377,7 +1432,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 8, "metadata": { "scrolled": true }, @@ -1386,14 +1441,14 @@ "name": "stderr", "output_type": "stream", "text": [ - "2023-02-20 09:00:23,738 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-20 09:00::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", - "2023-02-20 09:00:52,873 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", - "2023-02-20 09:00:54,331 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-20 09:01::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", - "2023-02-20 09:01:28,191 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", - "INFO::2023-02-20 09:01::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut_2.5x2.5_regrid2_metrics.json\n", - "2023-02-20 09:01:38,324 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut_2.5x2.5_regrid2_metrics.json\n" + "2023-02-21 14:22:54,655 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:23::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:23:18,559 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:23:20,370 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:23::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:23:44,270 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "INFO::2023-02-21 14:23::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut_2.5x2.5_regrid2_metrics.json\n", + "2023-02-21 14:23:44,273 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut_2.5x2.5_regrid2_metrics.json\n" ] }, { @@ -1494,7 +1549,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 9, "metadata": { "scrolled": true, "tags": [] @@ -1504,20 +1559,20 @@ "name": "stderr", "output_type": "stream", "text": [ - "2023-02-21 10:40:26,762 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-21 10:40::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", - "2023-02-21 10:40:48,167 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", - "2023-02-21 10:40:48,190 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-21 10:41::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", - "2023-02-21 10:41:09,002 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", - "2023-02-21 10:41:10,601 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-21 10:41::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", - "2023-02-21 10:41:37,682 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", - "2023-02-21 10:41:37,700 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-21 10:41::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", - "2023-02-21 10:41:58,764 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", - "INFO::2023-02-21 10:41::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_metrics.json\n", - "2023-02-21 10:41:58,770 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_metrics.json\n" + "2023-02-21 14:23:50,755 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:24::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:24:14,525 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:24:14,547 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:24::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:24:35,676 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:24:37,325 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:25::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:25:02,327 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:25:02,348 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:25::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:25:26,158 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "INFO::2023-02-21 14:25::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_metrics.json\n", + "2023-02-21 14:25:26,171 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_metrics.json\n" ] }, { @@ -1698,7 +1753,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 10, "metadata": { "scrolled": true, "tags": [] @@ -1708,18 +1763,18 @@ "name": "stderr", "output_type": "stream", "text": [ - "2023-02-20 09:03:54,726 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-20 09:05::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg_500/CanCM4_zg_500_2.5x2.5_regrid2_metrics_alternate1.json\n", - "2023-02-20 09:05:24,208 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg_500/CanCM4_zg_500_2.5x2.5_regrid2_metrics_alternate1.json\n", - "INFO::2023-02-20 09:05::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg_500_2.5x2.5_regrid2_metrics.json\n", - "2023-02-20 09:05:33,596 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg_500_2.5x2.5_regrid2_metrics.json\n" + "2023-02-21 14:25:33,713 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:27::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/zg_500/CanCM4_zg_500_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:27:19,315 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/zg_500/CanCM4_zg_500_2.5x2.5_regrid2_metrics_alternate1.json\n", + "INFO::2023-02-21 14:27::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/zg_500_2.5x2.5_regrid2_metrics.json\n", + "2023-02-21 14:27:29,314 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/zg_500_2.5x2.5_regrid2_metrics.json\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "case_id: Ex6 \n", + "case_id: Ex5 \n", " test_data_set: ['CanCM4'] \n", " realization: \n", " vars: ['zg_500'] \n", @@ -1736,7 +1791,7 @@ " regions: {'rlut': ['Global']} \n", " test_data_path: demo_data/CMIP5_demo_clims/ \n", " reference_data_path: demo_data/obs4MIPs_PCMDI_clims \n", - " metrics_output_path: demo_output/Ex6 \n", + " metrics_output_path: demo_output/Ex5 \n", " debug: False \n", "\n", "--- prepare mean climate metrics calculation ---\n", @@ -1855,7 +1910,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": { "scrolled": true }, @@ -1864,148 +1919,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "2023-02-20 09:05:39,761 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-20 09:06::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", - "2023-02-20 09:06:08,881 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", - "2023-02-20 09:06:08,903 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-20 09:06::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", - "2023-02-20 09:06:42,569 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", - "2023-02-20 09:06:44,191 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-20 09:07::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", - "2023-02-20 09:07:13,173 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", - "2023-02-20 09:07:13,196 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", - "INFO::2023-02-20 09:07::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", - "2023-02-20 09:07:43,353 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", - "INFO::2023-02-20 09:07::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_metrics.json\n", - "2023-02-20 09:07:53,368 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_metrics.json\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "case_id: Ex5 \n", - " test_data_set: ['ACCESS1-0', 'CanCM4'] \n", - " realization: \n", - " vars: ['rlut'] \n", - " reference_data_set: ['all'] \n", - " target_grid: 2.5x2.5 \n", - " regrid_tool: regrid2 \n", - " regrid_tool_ocn: esmf \n", - " save_test_clims: True \n", - " test_clims_interpolated_output: None \n", - " filename_template: cmip5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20200426.nc \n", - " sftlf_filename_template: sftlf_%(model_version).nc \n", - " generate_sftlf: True \n", - " regions_specs: {'NHEX': {'domain': {'latitude': (30.0, 90)}}, 'SHEX': {'domain': {'latitude': (-90.0, -30)}}, 'TROPICS': {'domain': {'latitude': (-30.0, 30)}}, 'global': {}, '90S50S': {'domain': {'latitude': (-90.0, -50)}}, '50S20S': {'domain': {'latitude': (-50.0, -20)}}, '20S20N': {'domain': {'latitude': (-20.0, 20)}}, '20N50N': {'domain': {'latitude': (20.0, 50)}}, '50N90N': {'domain': {'latitude': (50.0, 90)}}, 'land_NHEX': {'value': 100, 'domain': {'latitude': (30.0, 90)}}, 'land_SHEX': {'value': 100, 'domain': {'latitude': (-90.0, -30)}}, 'land_TROPICS': {'value': 100, 'domain': {'latitude': (-30.0, 30)}}, 'land': {'value': 100}, 'ocean_NHEX': {'value': 0, 'domain': {'latitude': (30.0, 90)}}, 'ocean_SHEX': {'value': 0, 'domain': {'latitude': (-90.0, -30)}}, 'ocean_TROPICS': {'value': 0, 'domain': {'latitude': (30.0, 30)}}, 'ocean': {'value': 0}, 'CONUS': {'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'land_CONUS': {'value': 100, 'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'NAM': {'domain': {'latitude': (20.0, 90), 'longitude': (-180, 180)}}, 'NAO': {'domain': 
{'latitude': (20.0, 80), 'longitude': (-90, 40)}}, 'SAM': {'domain': {'latitude': (-20.0, -90), 'longitude': (0, 360)}}, 'PNA': {'domain': {'latitude': (20.0, 85), 'longitude': (120, 240)}}, 'PDO': {'domain': {'latitude': (20.0, 70), 'longitude': (110, 260)}}, 'AllMW': {'domain': {'latitude': (-40.0, 45.0), 'longitude': (0.0, 360.0)}}, 'AllM': {'domain': {'latitude': (-45.0, 45.0), 'longitude': (0.0, 360.0)}}, 'NAMM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (210.0, 310.0)}}, 'SAMM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (240.0, 330.0)}}, 'NAFM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (310.0, 60.0)}}, 'SAFM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (0.0, 90.0)}}, 'ASM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (60.0, 180.0)}}, 'AUSM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (90.0, 160.0)}}, 'AIR': {'domain': {'latitude': (7.0, 25.0), 'longitude': (65.0, 85.0)}}, 'AUS': {'domain': {'latitude': (-20.0, -10.0), 'longitude': (120.0, 150.0)}}, 'Sahel': {'domain': {'latitude': (13.0, 18.0), 'longitude': (-10.0, 10.0)}}, 'GoG': {'domain': {'latitude': (0.0, 5.0), 'longitude': (-10.0, 10.0)}}, 'NAmo': {'domain': {'latitude': (20.0, 37.0), 'longitude': (-112.0, -103.0)}}, 'SAmo': {'domain': {'latitude': (-20.0, 2.5), 'longitude': (-65.0, -40.0)}}} \n", - " regions: {'rlut': ['Global']} \n", - " test_data_path: demo_data/CMIP5_demo_clims/ \n", - " reference_data_path: demo_data/obs4MIPs_PCMDI_clims \n", - " metrics_output_path: demo_output/Ex5 \n", - " debug: False \n", - "\n", - "--- prepare mean climate metrics calculation ---\n", - "--- start mean climate metrics calculation ---\n", - "varname: rlut\n", - "level: None\n", - "reference_data_set (all): ['alternate1', 'default']\n", - "ref: alternate1\n", - "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-0/v20210804/rlut_mon_CERES-EBAF-4-0_PCMDI_gn.200301-201812.AC.v20210804.nc\n", - "-----------------------\n", - "model, run: ACCESS1-0 \n", - "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", - "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", - "load and regrid done\n", - "region: Global\n", - "compute metrics start\n", - "var: rlut\n", - "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", - "compute_metrics, rms_xyt\n", - "compute_metrics, stdObs_xyt\n", - "compute_metrics, std_xyt\n", - "compute_metrics-CALCULATE ANNUAL MEANS\n", - "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", - "compute_metrics-CALCULATE MSE\n", - "compute_metrics-CALCULATE MEAN RMS\n", - "compute_metrics-CALCULATE MEAN CORR\n", - "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", - "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", - "compute_metrics-CALCULATE ANNUAL MEANS\n", - "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", - "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", - "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", - "-----------------------\n", - "model, run: CanCM4 \n", - "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", - "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", - "load and regrid done\n", - "region: Global\n", - "compute metrics start\n", - "var: rlut\n", - "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", - "compute_metrics, rms_xyt\n", - "compute_metrics, stdObs_xyt\n", - "compute_metrics, std_xyt\n", - "compute_metrics-CALCULATE ANNUAL MEANS\n", - "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", - "compute_metrics-CALCULATE MSE\n", - "compute_metrics-CALCULATE MEAN RMS\n", - "compute_metrics-CALCULATE MEAN CORR\n", - "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", - "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", - "compute_metrics-CALCULATE ANNUAL MEANS\n", - "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", - "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", - "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", - "ref: default\n", - "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-1/v20210804/rlut_mon_CERES-EBAF-4-1_PCMDI_gn.200301-201812.AC.v20210804.nc\n", - "-----------------------\n", - "model, run: ACCESS1-0 \n", - "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", - "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", - "load and regrid done\n", - "region: Global\n", - "compute metrics start\n", - "var: rlut\n", - "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", - "compute_metrics, rms_xyt\n", - "compute_metrics, stdObs_xyt\n", - "compute_metrics, std_xyt\n", - "compute_metrics-CALCULATE ANNUAL MEANS\n", - "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", - "compute_metrics-CALCULATE MSE\n", - "compute_metrics-CALCULATE MEAN RMS\n", - "compute_metrics-CALCULATE MEAN CORR\n", - "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", - "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", - "compute_metrics-CALCULATE ANNUAL MEANS\n", - "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", - "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", - "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", - "-----------------------\n", - "model, run: CanCM4 \n", - "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", - "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", - "load and regrid done\n", - "region: Global\n", - "compute metrics start\n", - "var: rlut\n", - "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", - "compute_metrics, rms_xyt\n", - "compute_metrics, stdObs_xyt\n", - "compute_metrics, std_xyt\n", - "compute_metrics-CALCULATE ANNUAL MEANS\n", - "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", - "compute_metrics-CALCULATE MSE\n", - "compute_metrics-CALCULATE MEAN RMS\n", - "compute_metrics-CALCULATE MEAN CORR\n", - "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", - "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", - "compute_metrics-CALCULATE ANNUAL MEANS\n", - "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", - "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", - "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", - "pmp mean clim driver completed\n" + "2023-02-21 14:27:36,115 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n" ] } ], From 4a4b5b3b2db70ef5d4747d2a1ef89262936c51ea Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Tue, 21 Feb 2023 15:42:37 -0800 Subject: [PATCH 129/130] clean up --- doc/jupyter/Demo/Demo_1b_mean_climate.ipynb | 145 +++++++++++++++++++- 1 file changed, 143 insertions(+), 2 deletions(-) diff --git a/doc/jupyter/Demo/Demo_1b_mean_climate.ipynb b/doc/jupyter/Demo/Demo_1b_mean_climate.ipynb index 7689c04a9..76c1b948a 100644 --- a/doc/jupyter/Demo/Demo_1b_mean_climate.ipynb +++ b/doc/jupyter/Demo/Demo_1b_mean_climate.ipynb @@ -1910,7 +1910,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "metadata": { "scrolled": true }, @@ -1919,7 +1919,148 @@ "name": "stderr", "output_type": "stream", "text": [ - "2023-02-21 14:27:36,115 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n" + "2023-02-21 14:27:36,115 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:28::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:28:06,336 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:28:06,360 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:28::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:28:37,819 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:28:39,744 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:29::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:29:13,750 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:29:13,771 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:29::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:29:47,125 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "INFO::2023-02-21 14:29::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut_2.5x2.5_regrid2_metrics.json\n", + "2023-02-21 14:29:57,273 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut_2.5x2.5_regrid2_metrics.json\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "case_id: Ex6 \n", + " test_data_set: ['ACCESS1-0', 'CanCM4'] \n", + " realization: \n", + " vars: ['rlut'] \n", + " reference_data_set: ['all'] \n", + " target_grid: 2.5x2.5 \n", + " regrid_tool: regrid2 \n", + " regrid_tool_ocn: esmf \n", + " save_test_clims: True \n", + " test_clims_interpolated_output: None \n", + " filename_template: cmip5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20200426.nc \n", + " sftlf_filename_template: sftlf_%(model_version).nc \n", + " generate_sftlf: True \n", + " regions_specs: {'NHEX': {'domain': {'latitude': (30.0, 90)}}, 'SHEX': {'domain': {'latitude': (-90.0, -30)}}, 'TROPICS': {'domain': {'latitude': (-30.0, 30)}}, 'global': {}, '90S50S': {'domain': {'latitude': (-90.0, -50)}}, '50S20S': {'domain': {'latitude': (-50.0, -20)}}, '20S20N': {'domain': {'latitude': (-20.0, 20)}}, '20N50N': {'domain': {'latitude': (20.0, 50)}}, '50N90N': 
{'domain': {'latitude': (50.0, 90)}}, 'land_NHEX': {'value': 100, 'domain': {'latitude': (30.0, 90)}}, 'land_SHEX': {'value': 100, 'domain': {'latitude': (-90.0, -30)}}, 'land_TROPICS': {'value': 100, 'domain': {'latitude': (-30.0, 30)}}, 'land': {'value': 100}, 'ocean_NHEX': {'value': 0, 'domain': {'latitude': (30.0, 90)}}, 'ocean_SHEX': {'value': 0, 'domain': {'latitude': (-90.0, -30)}}, 'ocean_TROPICS': {'value': 0, 'domain': {'latitude': (30.0, 30)}}, 'ocean': {'value': 0}, 'CONUS': {'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'land_CONUS': {'value': 100, 'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'NAM': {'domain': {'latitude': (20.0, 90), 'longitude': (-180, 180)}}, 'NAO': {'domain': {'latitude': (20.0, 80), 'longitude': (-90, 40)}}, 'SAM': {'domain': {'latitude': (-20.0, -90), 'longitude': (0, 360)}}, 'PNA': {'domain': {'latitude': (20.0, 85), 'longitude': (120, 240)}}, 'PDO': {'domain': {'latitude': (20.0, 70), 'longitude': (110, 260)}}, 'AllMW': {'domain': {'latitude': (-40.0, 45.0), 'longitude': (0.0, 360.0)}}, 'AllM': {'domain': {'latitude': (-45.0, 45.0), 'longitude': (0.0, 360.0)}}, 'NAMM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (210.0, 310.0)}}, 'SAMM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (240.0, 330.0)}}, 'NAFM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (310.0, 60.0)}}, 'SAFM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (0.0, 90.0)}}, 'ASM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (60.0, 180.0)}}, 'AUSM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (90.0, 160.0)}}, 'AIR': {'domain': {'latitude': (7.0, 25.0), 'longitude': (65.0, 85.0)}}, 'AUS': {'domain': {'latitude': (-20.0, -10.0), 'longitude': (120.0, 150.0)}}, 'Sahel': {'domain': {'latitude': (13.0, 18.0), 'longitude': (-10.0, 10.0)}}, 'GoG': {'domain': {'latitude': (0.0, 5.0), 'longitude': (-10.0, 10.0)}}, 'NAmo': {'domain': {'latitude': (20.0, 37.0), 'longitude': (-112.0, -103.0)}}, 'SAmo': {'domain': {'latitude': (-20.0, 2.5), 'longitude': (-65.0, -40.0)}}} \n", + " regions: {'rlut': ['Global']} \n", + " test_data_path: demo_data/CMIP5_demo_clims/ \n", + " reference_data_path: demo_data/obs4MIPs_PCMDI_clims \n", + " metrics_output_path: demo_output/Ex6 \n", + " debug: False \n", + "\n", + "--- prepare mean climate metrics calculation ---\n", + "--- start mean climate metrics calculation ---\n", + "varname: rlut\n", + "level: None\n", + "reference_data_set (all): ['alternate1', 'default']\n", + "ref: alternate1\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-0/v20210804/rlut_mon_CERES-EBAF-4-0_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "ref: default\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-1/v20210804/rlut_mon_CERES-EBAF-4-1_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "pmp mean clim driver completed\n" ] } ], From f543015933114ad05818bb64f8490dc49aa9d928 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Tue, 21 Feb 2023 18:25:22 -0800 Subject: [PATCH 130/130] enable `save_test_clims` options --- .../lib/create_mean_climate_parser.py | 8 ++++++ .../mean_climate/mean_climate_driver.py | 27 ++++++++++++++++--- 2 files changed, 31 insertions(+), 4 deletions(-) diff --git a/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py b/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py index 85dbf3742..d06447692 100644 --- a/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py +++ b/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py @@ -186,6 +186,14 @@ def create_mean_climate_parser(): help="Directory of where to put the results", required=False, ) + + parser.add_argument( + "--diagnostics_output_path", + dest="diagnostics_output_path", + help="Directory of where to put the results", + default=None, + required=False, + ) parser.add_argument( "--filename_output_template", diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py index b1c319e35..476133de0 100755 --- a/pcmdi_metrics/mean_climate/mean_climate_driver.py +++ b/pcmdi_metrics/mean_climate/mean_climate_driver.py @@ -43,11 +43,19 @@ regions = parameter.regions test_data_path = parameter.test_data_path reference_data_path = parameter.reference_data_path 
-metrics_output_path = parameter.metrics_output_path.replace('%(case_id)', case_id) - +metrics_output_path = parameter.metrics_output_path +diagnostics_output_path = parameter.diagnostics_output_path debug = parameter.debug cmec = parameter.cmec +if metrics_output_path is not None: + metrics_output_path = parameter.metrics_output_path.replace('%(case_id)', case_id) + +if diagnostics_output_path is None: + diagnostics_output_path = metrics_output_path.replace('metrics_results', 'diagnostic_results') + +diagnostics_output_path = diagnostics_output_path.replace('%(case_id)', case_id) + find_all_realizations = False if realization is None: realization = "" @@ -84,6 +92,7 @@ 'test_data_path:', test_data_path, '\n', 'reference_data_path:', reference_data_path, '\n', 'metrics_output_path:', metrics_output_path, '\n', + 'diagnostics_output_path:', diagnostics_output_path, '\n', 'debug:', debug, '\n') print('--- prepare mean climate metrics calculation ---') @@ -223,11 +232,21 @@ if region not in list(ds_ref_dict.keys()): ds_ref_dict[region] = region_subset(ds_ref_tmp, regions_specs, region=region) print('spatial subset done') + + if save_test_clims and ref == reference_data_set[0]: + test_clims_dir = os.path.join( + diagnostics_output_path, var, 'interpolated_model_clims') + os.makedirs(test_clims_dir, exist_ok=True) + test_clims_file = os.path.join( + test_clims_dir, + '_'.join([var, model, run, 'interpolated', regrid_tool, region, 'AC', case_id + '.nc'])) + ds_test_dict[region].to_netcdf(test_clims_file) if debug: print('ds_test_tmp:', ds_test_tmp) - ds_test_dict[region].to_netcdf('_'.join([var, 'model', region + '.nc'])) - ds_ref_dict[region].to_netcdf('_'.join([var, 'ref', region + '.nc'])) + ds_test_dict[region].to_netcdf('_'.join([var, 'model', model, run, region + '.nc'])) + if model == test_data_set[0] and run == realizations[0]: + ds_ref_dict[region].to_netcdf('_'.join([var, 'ref', region + '.nc'])) # compute metrics print('compute metrics start')
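
The final patch above (PATCH 130/130) changes how output directories are resolved: `metrics_output_path` keeps its `%(case_id)` substitution, a new `diagnostics_output_path` option falls back to the metrics path with `metrics_results` swapped for `diagnostic_results` when the user does not supply one, and interpolated model climatologies are written under that directory when `save_test_clims` is enabled. The standalone sketch below only mirrors that fallback and file-naming logic; the case id, paths, variable, model, run, regrid tool, and region values are hypothetical placeholders, not values taken from the patch.

    import os

    # Hypothetical inputs standing in for the parameter-file / command-line values.
    case_id = "Ex1"
    metrics_output_path = "demo_output/metrics_results/%(case_id)"
    diagnostics_output_path = None  # e.g. --diagnostics_output_path not provided

    # Same order as the patch: substitute case_id into the metrics path first...
    if metrics_output_path is not None:
        metrics_output_path = metrics_output_path.replace("%(case_id)", case_id)

    # ...then derive the diagnostics path from it when none was given.
    if diagnostics_output_path is None:
        diagnostics_output_path = metrics_output_path.replace(
            "metrics_results", "diagnostic_results")
    diagnostics_output_path = diagnostics_output_path.replace("%(case_id)", case_id)

    print(metrics_output_path)      # demo_output/metrics_results/Ex1
    print(diagnostics_output_path)  # demo_output/diagnostic_results/Ex1

    # File naming used when save_test_clims is enabled (placeholder names).
    var, model, run = "rlut", "ACCESS1-0", "r1i1p1"
    regrid_tool, region = "regrid2", "Global"
    test_clims_dir = os.path.join(diagnostics_output_path, var, "interpolated_model_clims")
    test_clims_file = os.path.join(
        test_clims_dir,
        "_".join([var, model, run, "interpolated", regrid_tool, region, "AC", case_id + ".nc"]),
    )
    print(test_clims_file)
    # demo_output/diagnostic_results/Ex1/rlut/interpolated_model_clims/
    #   rlut_ACCESS1-0_r1i1p1_interpolated_regrid2_Global_AC_Ex1.nc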