diff --git a/cmec/scripts/climatologies.py b/cmec/scripts/climatologies.py index 444be8cb8..84067cc28 100644 --- a/cmec/scripts/climatologies.py +++ b/cmec/scripts/climatologies.py @@ -2,7 +2,7 @@ import subprocess import sys -import genutil +from pcmdi_metrics.utils import StringConstructor def make_climatologies(settings, model_dir, wk_dir): @@ -12,7 +12,7 @@ def make_climatologies(settings, model_dir, wk_dir): realization = settings.get("realization", "") period = settings.get("period", "") tmp = os.path.join(model_dir, filename_template) - model_file = genutil.StringConstructor(tmp) + model_file = StringConstructor(tmp) model_file.period = period model_file.realization = realization out_base = os.path.join(wk_dir, "AC") diff --git a/doc/jupyter/Demo/Demo_2a_monsoon_wang.ipynb b/doc/jupyter/Demo/Demo_2a_monsoon_wang.ipynb index 7f1abc627..50ce366cf 100644 --- a/doc/jupyter/Demo/Demo_2a_monsoon_wang.ipynb +++ b/doc/jupyter/Demo/Demo_2a_monsoon_wang.ipynb @@ -82,7 +82,8 @@ "results_dir = 'demo_output/monsoon_wang'\n", "\n", "# Threshold\n", - "threshold = 2.5 / 86400\n" + "threshold = 2.5 / 86400\n", + "\n" ] } ], @@ -110,7 +111,6 @@ "name": "stdout", "output_type": "stream", "text": [ - "Deprecation warning: please use 'import pcmdi_metrics.driver.pmp_parser.PMPParser'\n", "******************************************************************************************\n", "demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.pr.198101-200512.AC.v20200426.nc\n" ] @@ -119,7 +119,14 @@ "name": "stderr", "output_type": "stream", "text": [ - "INFO::2021-11-15 14:52::pcmdi_metrics:: Results saved to a json file: /Users/ordonez4/Documents/git/pcmdi_metrics/doc/jupyter/Demo/demo_output/monsoon_wang/monsoon_wang.json\n" + "/Users/lee1043/mambaforge/envs/pmp_devel_20230223/lib/python3.9/site-packages/cdms2/avariable.py:1289: Warning: \n", + "avariable.regrid: regridTool = 'esmf' but your version does not\n", + "seems to be built with esmf, will switch to regridTool = 'libcf'\n", + " \n", + " warnings.warn(message, Warning)\n", + "INFO::2023-12-18 12:39::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20230620_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/monsoon_wang/monsoon_wang.json\n", + "2023-12-18 12:39:49,908 [INFO]: base.py(write:250) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20230620_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/monsoon_wang/monsoon_wang.json\n", + "2023-12-18 12:39:49,908 [INFO]: base.py(write:250) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20230620_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/monsoon_wang/monsoon_wang.json\n" ] } ], @@ -148,42 +155,42 @@ " \"CanCM4\": {\n", " \"AllMW\": {\n", " \"cor\": \"0.754\",\n", - " \"rmsn\": \"0.692\",\n", - " \"threat_score\": \"0.477\"\n", + " \"rmsn\": \"0.691\",\n", + " \"threat_score\": \"0.479\"\n", " },\n", " \"AllM\": {\n", " \"cor\": \"0.757\",\n", - " \"rmsn\": \"0.691\",\n", - " \"threat_score\": \"0.477\"\n", + " \"rmsn\": \"0.689\",\n", + " \"threat_score\": \"0.479\"\n", " },\n", " \"NAMM\": {\n", - " \"cor\": \"0.788\",\n", - " \"rmsn\": \"0.656\",\n", + " \"cor\": \"0.791\",\n", + " \"rmsn\": \"0.650\",\n", " \"threat_score\": \"0.474\"\n", " },\n", " \"SAMM\": {\n", " \"cor\": \"0.770\",\n", - " \"rmsn\": \"0.698\",\n", - " \"threat_score\": \"0.459\"\n", + " \"rmsn\": \"0.695\",\n", + " \"threat_score\": \"0.456\"\n", " },\n", " \"NAFM\": {\n", - " \"cor\": 
\"0.775\",\n", - " \"rmsn\": \"0.649\",\n", - " \"threat_score\": \"0.409\"\n", + " \"cor\": \"0.776\",\n", + " \"rmsn\": \"0.646\",\n", + " \"threat_score\": \"0.411\"\n", " },\n", " \"SAFM\": {\n", - " \"cor\": \"0.782\",\n", - " \"rmsn\": \"0.673\",\n", + " \"cor\": \"0.780\",\n", + " \"rmsn\": \"0.674\",\n", " \"threat_score\": \"0.645\"\n", " },\n", " \"ASM\": {\n", - " \"cor\": \"0.724\",\n", - " \"rmsn\": \"0.715\",\n", - " \"threat_score\": \"0.403\"\n", + " \"cor\": \"0.726\",\n", + " \"rmsn\": \"0.713\",\n", + " \"threat_score\": \"0.405\"\n", " },\n", " \"AUSM\": {\n", " \"cor\": \"0.835\",\n", - " \"rmsn\": \"0.580\",\n", + " \"rmsn\": \"0.578\",\n", " \"threat_score\": \"0.523\"\n", " }\n", " }\n", @@ -222,7 +229,6 @@ "name": "stdout", "output_type": "stream", "text": [ - "Deprecation warning: please use 'import pcmdi_metrics.driver.pmp_parser.PMPParser'\n", "******************************************************************************************\n", "demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.pr.198101-200512.AC.v20200426.nc\n" ] @@ -231,7 +237,14 @@ "name": "stderr", "output_type": "stream", "text": [ - "INFO::2021-11-15 14:52::pcmdi_metrics:: Results saved to a json file: /Users/ordonez4/Documents/git/pcmdi_metrics/doc/jupyter/Demo/demo_output/monsoon_wang/monsoon_wang_ex2.json\n" + "/Users/lee1043/mambaforge/envs/pmp_devel_20230223/lib/python3.9/site-packages/cdms2/avariable.py:1289: Warning: \n", + "avariable.regrid: regridTool = 'esmf' but your version does not\n", + "seems to be built with esmf, will switch to regridTool = 'libcf'\n", + " \n", + " warnings.warn(message, Warning)\n", + "INFO::2023-12-18 12:40::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20230620_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/monsoon_wang/monsoon_wang_ex2.json\n", + "2023-12-18 12:40:38,357 [INFO]: base.py(write:250) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20230620_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/monsoon_wang/monsoon_wang_ex2.json\n", + "2023-12-18 12:40:38,357 [INFO]: base.py(write:250) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20230620_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/monsoon_wang/monsoon_wang_ex2.json\n" ] } ], @@ -265,43 +278,43 @@ " \"CanCM4\": {\n", " \"AllMW\": {\n", " \"cor\": \"0.754\",\n", - " \"rmsn\": \"0.692\",\n", + " \"rmsn\": \"0.691\",\n", " \"threat_score\": \"0.457\"\n", " },\n", " \"AllM\": {\n", " \"cor\": \"0.757\",\n", - " \"rmsn\": \"0.691\",\n", + " \"rmsn\": \"0.689\",\n", " \"threat_score\": \"0.457\"\n", " },\n", " \"NAMM\": {\n", - " \"cor\": \"0.788\",\n", - " \"rmsn\": \"0.656\",\n", - " \"threat_score\": \"0.476\"\n", + " \"cor\": \"0.791\",\n", + " \"rmsn\": \"0.650\",\n", + " \"threat_score\": \"0.473\"\n", " },\n", " \"SAMM\": {\n", " \"cor\": \"0.770\",\n", - " \"rmsn\": \"0.698\",\n", - " \"threat_score\": \"0.466\"\n", + " \"rmsn\": \"0.695\",\n", + " \"threat_score\": \"0.458\"\n", " },\n", " \"NAFM\": {\n", - " \"cor\": \"0.775\",\n", - " \"rmsn\": \"0.649\",\n", - " \"threat_score\": \"0.385\"\n", + " \"cor\": \"0.776\",\n", + " \"rmsn\": \"0.646\",\n", + " \"threat_score\": \"0.387\"\n", " },\n", " \"SAFM\": {\n", - " \"cor\": \"0.782\",\n", - " \"rmsn\": \"0.673\",\n", - " \"threat_score\": \"0.660\"\n", + " \"cor\": \"0.780\",\n", + " \"rmsn\": \"0.674\",\n", + " \"threat_score\": \"0.667\"\n", " },\n", " \"ASM\": {\n", - " \"cor\": \"0.724\",\n", - " 
\"rmsn\": \"0.715\",\n", - " \"threat_score\": \"0.365\"\n", + " \"cor\": \"0.726\",\n", + " \"rmsn\": \"0.713\",\n", + " \"threat_score\": \"0.368\"\n", " },\n", " \"AUSM\": {\n", " \"cor\": \"0.835\",\n", - " \"rmsn\": \"0.580\",\n", - " \"threat_score\": \"0.429\"\n", + " \"rmsn\": \"0.578\",\n", + " \"threat_score\": \"0.435\"\n", " }\n", " }\n", "}\n" @@ -324,7 +337,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, @@ -338,7 +351,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.6" + "version": "3.9.7" }, "selected_variables": [], "vcdat_file_path": "", diff --git a/pcmdi_metrics/enso/enso_driver.py b/pcmdi_metrics/enso/enso_driver.py index ef4491ba6..d1981f171 100755 --- a/pcmdi_metrics/enso/enso_driver.py +++ b/pcmdi_metrics/enso/enso_driver.py @@ -15,7 +15,6 @@ defCollection, ) from EnsoMetrics.EnsoComputeMetricsLib import ComputeCollection -from genutil import StringConstructor from pcmdi_metrics import resources from pcmdi_metrics.enso.lib import ( @@ -25,8 +24,8 @@ get_file, match_obs_name, metrics_to_json, - sort_human, ) +from pcmdi_metrics.utils import StringConstructor, sort_human # To avoid below error when using multi cores # OpenBLAS blas_thread_init: pthread_create failed for thread XX of 96: Resource temporarily unavailable diff --git a/pcmdi_metrics/enso/scripts_pcmdi/enso_driver_obsOnly.py b/pcmdi_metrics/enso/scripts_pcmdi/enso_driver_obsOnly.py index 74bb74ee1..5f3350af1 100755 --- a/pcmdi_metrics/enso/scripts_pcmdi/enso_driver_obsOnly.py +++ b/pcmdi_metrics/enso/scripts_pcmdi/enso_driver_obsOnly.py @@ -2,18 +2,16 @@ # ================================================= # Dependencies # ------------------------------------------------- -from __future__ import print_function - import glob import json import os from EnsoMetrics.EnsoCollectionsLib import ReferenceObservations, defCollection from EnsoMetrics.EnsoComputeMetricsLib import ComputeCollection_ObsOnly -from genutil import StringConstructor from pcmdi_metrics import resources from pcmdi_metrics.enso.lib import AddParserArgument, metrics_to_json +from pcmdi_metrics.utils import StringConstructor # To avoid below error when using multi cores # OpenBLAS blas_thread_init: pthread_create failed for thread XX of 96: Resource temporarily unavailable diff --git a/pcmdi_metrics/enso/scripts_pcmdi/parallel_driver.py b/pcmdi_metrics/enso/scripts_pcmdi/parallel_driver.py index 5a030dc97..38de071ed 100755 --- a/pcmdi_metrics/enso/scripts_pcmdi/parallel_driver.py +++ b/pcmdi_metrics/enso/scripts_pcmdi/parallel_driver.py @@ -13,11 +13,9 @@ import glob import os -from genutil import StringConstructor - from pcmdi_metrics.enso.lib import AddParserArgument, find_realm from pcmdi_metrics.misc.scripts import parallel_submitter -from pcmdi_metrics.variability_mode.lib import sort_human +from pcmdi_metrics.utils import StringConstructor, sort_human # ================================================= # Collect user defined options diff --git a/pcmdi_metrics/enso/scripts_pcmdi/post_process_merge_jsons.py b/pcmdi_metrics/enso/scripts_pcmdi/post_process_merge_jsons.py index 5f8b1a2ce..e0894e6f8 100755 --- a/pcmdi_metrics/enso/scripts_pcmdi/post_process_merge_jsons.py +++ b/pcmdi_metrics/enso/scripts_pcmdi/post_process_merge_jsons.py @@ -7,8 +7,7 @@ import json import os -from genutil import StringConstructor - +from pcmdi_metrics.utils import StringConstructor from 
pcmdi_metrics.variability_mode.lib import dict_merge diff --git a/pcmdi_metrics/graphics/deprecated/portraits.py b/pcmdi_metrics/graphics/deprecated/portraits.py index 474d6aca7..6d922b2b1 100644 --- a/pcmdi_metrics/graphics/deprecated/portraits.py +++ b/pcmdi_metrics/graphics/deprecated/portraits.py @@ -13,7 +13,8 @@ import numpy import pkg_resources import vcs -from genutil import StringConstructor + +from pcmdi_metrics.utils import StringConstructor pmp_egg_path = pkg_resources.resource_filename( pkg_resources.Requirement.parse("pcmdi_metrics"), "share" diff --git a/pcmdi_metrics/io/base.py b/pcmdi_metrics/io/base.py index 003f2313f..85bbe1eeb 100755 --- a/pcmdi_metrics/io/base.py +++ b/pcmdi_metrics/io/base.py @@ -19,6 +19,7 @@ import pcmdi_metrics from pcmdi_metrics import LOG_LEVEL +from pcmdi_metrics.utils import StringConstructor value = 0 cdms2.setNetcdfShuffleFlag(value) # where value is either 0 or 1 @@ -143,9 +144,9 @@ def default(self, o): return {o.id: "cdutil.region.domain(%s)" % args} -class Base(cdp.cdp_io.CDPIO, genutil.StringConstructor): +class Base(cdp.cdp_io.CDPIO, StringConstructor): def __init__(self, root, file_template, file_mask_template=None): - genutil.StringConstructor.__init__(self, root + "/" + file_template) + StringConstructor.__init__(self, root + "/" + file_template) self.target_grid = None self.mask = None self.target_mask = None @@ -156,7 +157,7 @@ def __init__(self, root, file_template, file_mask_template=None): self.setup_cdms2() def __call__(self): - path = os.path.abspath(genutil.StringConstructor.__call__(self)) + path = os.path.abspath(StringConstructor.__call__(self)) if self.type in path: return path else: diff --git a/pcmdi_metrics/mean_climate/lib/pmp_parameter.py b/pcmdi_metrics/mean_climate/lib/pmp_parameter.py index e993c967b..b3ee9c9d0 100755 --- a/pcmdi_metrics/mean_climate/lib/pmp_parameter.py +++ b/pcmdi_metrics/mean_climate/lib/pmp_parameter.py @@ -2,9 +2,9 @@ import os import cdp.cdp_parameter -import genutil from pcmdi_metrics import LOG_LEVEL +from pcmdi_metrics.utils import StringConstructor try: basestring # noqa @@ -17,12 +17,12 @@ def __init__(self): logging.getLogger("pmp").setLevel(LOG_LEVEL) def process_templated_argument(self, name, default_value="*", extras=None): - """Applies arg parse values to a genutil.StringConstructor template type argument + """Applies arg parse values to a StringConstructor template type argument Input: name: name of the argument to process extra: other object(s) to get keys from, superseeds argparse object Output: - formatted argument as a genutil.StringConstructor + formatted argument as a StringConstructor """ process = getattr(self, name, None) @@ -31,7 +31,7 @@ def process_templated_argument(self, name, default_value="*", extras=None): ): # Ok not an argument from arg_parse maybe a template or string constructor itself if isinstance(name, basestring): process = name - elif isinstance(name, genutil.StringConstructor): + elif isinstance(name, StringConstructor): process = name.template else: raise RuntimeError( @@ -49,7 +49,7 @@ def process_templated_argument(self, name, default_value="*", extras=None): sources = [extras] sources.insert(0, self) # will use itself as default source - process = genutil.StringConstructor(process) + process = StringConstructor(process) for key in process.keys(): for source in sources: setattr(process, key, getattr(source, key, default_value)) diff --git a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py 
b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py index 65fa04f4e..cf12eeddd 100755 --- a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py +++ b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py @@ -2,10 +2,9 @@ import datetime -from genutil import StringConstructor - from pcmdi_metrics.mean_climate.lib import calculate_climatology from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPMetricsParser +from pcmdi_metrics.utils import StringConstructor ver = datetime.datetime.now().strftime("v%Y%m%d") diff --git a/pcmdi_metrics/mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py b/pcmdi_metrics/mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py index 11e81c7f7..a33e5d115 100644 --- a/pcmdi_metrics/mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py +++ b/pcmdi_metrics/mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py @@ -9,10 +9,10 @@ import cdp import cdtime import cdutil -import genutil import numpy from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPParser +from pcmdi_metrics.utils import StringConstructor try: import cmor @@ -201,7 +201,7 @@ def load_parser(parser): for A in As: for tmpl in [A.modpath, A.filename_template, A.output_filename_template]: - con = genutil.StringConstructor(tmpl) + con = StringConstructor(tmpl) print("TEMPLE:", con.template) for k in con.keys(): print("ADDING OPTION:", k) @@ -648,7 +648,7 @@ def runClim(A): print(B1.tocomp(cal), "<", t, "<", B2.tocomp(cal)) bounds.append([B1.torel(Tunits, cal).value, B2.torel(Tunits, cal).value]) - fnmout = genutil.StringConstructor(A.output_filename_template) + fnmout = StringConstructor(A.output_filename_template) if "model_id" in fnmout.keys(): model_id = checkCMORAttribute("model_id") diff --git a/pcmdi_metrics/mean_climate/scripts/post_process_merge_jsons.py b/pcmdi_metrics/mean_climate/scripts/post_process_merge_jsons.py index 02653f8a1..d47217ad7 100755 --- a/pcmdi_metrics/mean_climate/scripts/post_process_merge_jsons.py +++ b/pcmdi_metrics/mean_climate/scripts/post_process_merge_jsons.py @@ -5,8 +5,7 @@ import json import os -from genutil import StringConstructor - +from pcmdi_metrics.utils import StringConstructor from pcmdi_metrics.variability_mode.lib import dict_merge diff --git a/pcmdi_metrics/mjo/lib/lib_mjo.py b/pcmdi_metrics/mjo/lib/lib_mjo.py index 70e8b87f6..7e3cb58dd 100644 --- a/pcmdi_metrics/mjo/lib/lib_mjo.py +++ b/pcmdi_metrics/mjo/lib/lib_mjo.py @@ -297,9 +297,7 @@ def mjo_metrics_to_json( outdir, json_filename, result_dict, model=None, run=None, cmec_flag=False ): # Open JSON - JSON = pcmdi_metrics.io.base.Base( - outdir(output_type="metrics_results"), json_filename - ) + JSON = pcmdi_metrics.io.base.Base(outdir, json_filename) # Dict for JSON if model is None and run is None: result_dict_to_json = result_dict diff --git a/pcmdi_metrics/mjo/lib/mjo_metric_calc.py b/pcmdi_metrics/mjo/lib/mjo_metric_calc.py index a7a1c87c8..88b1a16e2 100644 --- a/pcmdi_metrics/mjo/lib/mjo_metric_calc.py +++ b/pcmdi_metrics/mjo/lib/mjo_metric_calc.py @@ -38,7 +38,7 @@ def mjo_metric_ewr_calculation( startYear, endYear, segmentLength, - outdir, + dir_paths, season="NDJFMA", ): # Open file to read daily dataset @@ -107,8 +107,8 @@ def mjo_metric_ewr_calculation( daSeaCyc.setAxis(2, lon) segment_ano[year].setAxis(1, lat) segment_ano[year].setAxis(2, lon) - """ - Space-time power spectra + + """ Space-time power spectra Handle each segment (i.e. each year) separately. 1. 
Get daily time series (3D: time and spatial 2D) @@ -117,6 +117,7 @@ def mjo_metric_ewr_calculation( 4. Proceed 2-D FFT to get power. Then get multi-year averaged power after the year loop. """ + # Define array for archiving power from each year segment Power = np.zeros((numYear, NT + 1, NL + 1), np.float) @@ -150,62 +151,37 @@ def mjo_metric_ewr_calculation( print("west power: ", westPower) # Output - output_filename = "{}_{}_{}_{}_{}_{}-{}_{}".format( - mip, model, exp, run, "mjo", startYear, endYear, season - ) + output_filename = f"{mip}_{model}_{exp}_{run}_mjo_{startYear}-{endYear}_{season}" if cmmGrid: output_filename += "_cmmGrid" # NetCDF output if nc_out: - os.makedirs(outdir(output_type="diagnostic_results"), exist_ok=True) - fout = os.path.join(outdir(output_type="diagnostic_results"), output_filename) + os.makedirs(dir_paths["diagnostic_results"], exist_ok=True) + fout = os.path.join(dir_paths["diagnostic_results"], output_filename) write_netcdf_output(OEE, fout) # Plot if plot: - os.makedirs(outdir(output_type="graphics"), exist_ok=True) - fout = os.path.join(outdir(output_type="graphics"), output_filename) + os.makedirs(dir_paths["graphics"], exist_ok=True) + fout = os.path.join(dir_paths["graphics"], output_filename) if model == "obs": - title = ( - " OBS (" - + run - + ") \n" - + var.capitalize() - + ", " - + season - + " " - + str(startYear) - + "-" - + str(endYear) - ) + title = f"OBS ({run})\n{var.capitalize()}, {season} {startYear}-{endYear}" else: - title = ( - mip.upper() - + ": " - + model - + " (" - + run - + ") \n" - + var.capitalize() - + ", " - + season - + " " - + str(startYear) - + "-" - + str(endYear) - ) + title = f"{mip.upper()}: {model} ({run})\n{var.capitalize()}, {season} {startYear}-{endYear}" + if cmmGrid: title += ", common grid (2.5x2.5deg)" plot_power(OEE, title, fout, ewr) # Output to JSON - metrics_result = {} - metrics_result["east_power"] = eastPower - metrics_result["west_power"] = westPower - metrics_result["east_west_power_ratio"] = ewr - metrics_result["analysis_time_window_start_year"] = startYear - metrics_result["analysis_time_window_end_year"] = endYear + metrics_result = { + "east_power": eastPower, + "west_power": westPower, + "east_west_power_ratio": ewr, + "analysis_time_window_start_year": startYear, + "analysis_time_window_end_year": endYear, + } # Debug checking plot if debug and plot: diff --git a/pcmdi_metrics/mjo/mjo_metrics_driver.py b/pcmdi_metrics/mjo/mjo_metrics_driver.py index 5e6028612..4aed47835 100755 --- a/pcmdi_metrics/mjo/mjo_metrics_driver.py +++ b/pcmdi_metrics/mjo/mjo_metrics_driver.py @@ -39,11 +39,8 @@ import sys import time from argparse import RawTextHelpFormatter -from collections import defaultdict from shutil import copyfile -from genutil import StringConstructor - import pcmdi_metrics from pcmdi_metrics.mean_climate.lib import pmp_parser from pcmdi_metrics.mjo.lib import ( @@ -52,10 +49,7 @@ mjo_metric_ewr_calculation, mjo_metrics_to_json, ) - -# To avoid below error -# OpenBLAS blas_thread_init: pthread_create failed for thread XX of 96: Resource temporarily unavailable -# os.environ['OPENBLAS_NUM_THREADS'] = '1' +from pcmdi_metrics.utils import fill_template, tree # Must be done before any CDAT library is called. 
# https://github.com/CDAT/cdat/issues/2213 @@ -100,18 +94,20 @@ reference_data_path = param.reference_data_path # Path to model data as string template -modpath = param.process_templated_argument("modpath") +modpath = param.modpath # Check given model option models = param.modnames # Include all models if conditioned if ("all" in [m.lower() for m in models]) or (models == "all"): - model_index_path = re.split(". |_", param.modpath.split("/")[-1]).index("%(model)") + model_index_path = re.split(". |_", modpath.split("/")[-1]).index("%(model)") models = [ re.split(". |_", p.split("/")[-1])[model_index_path] for p in glob.glob( - modpath(mip=mip, exp=exp, model="*", realization="*", variable=varModel) + fill_template( + modpath, mip=mip, exp=exp, model="*", realization="*", variable=varModel + ) ) ] # remove duplicates @@ -126,18 +122,26 @@ # case id case_id = param.case_id -# Output -outdir_template = param.process_templated_argument("results_dir") -outdir = StringConstructor( - str( - outdir_template(output_type="%(output_type)", mip=mip, exp=exp, case_id=case_id) - ) -) +# Output directory +outdir_template = param.results_dir + +# Create output directories +output_types = ["graphics", "diagnostic_results", "metrics_results"] +dir_paths = {} -# Create output directory -for output_type in ["graphics", "diagnostic_results", "metrics_results"]: - os.makedirs(outdir(output_type=output_type), exist_ok=True) - print(outdir(output_type=output_type)) +print("output directories:") + +for output_type in output_types: + dir_path = fill_template( + outdir_template, + output_type=output_type, + mip=mip, + exp=exp, + case_id=case_id, + ) + os.makedirs(dir_path, exist_ok=True) + print(output_type, ":", dir_path) + dir_paths[output_type] = dir_path # Generate CMEC compliant json if hasattr(param, "cmec"): @@ -175,21 +179,15 @@ # ================================================= # Declare dictionary for .json record # ------------------------------------------------- - - -def tree(): - return defaultdict(tree) - - result_dict = tree() # Define output json file json_filename = "_".join( ["mjo_stat", mip, exp, fq, realm, str(msyear) + "-" + str(meyear)] ) -json_file = os.path.join(outdir(output_type="metrics_results"), json_filename + ".json") +json_file = os.path.join(dir_paths["metrics_results"], json_filename + ".json") json_file_org = os.path.join( - outdir(output_type="metrics_results"), + dir_paths["metrics_results"], "_".join([json_filename, "org", str(os.getpid())]) + ".json", ) @@ -236,7 +234,8 @@ def tree(): eyear = meyear # variable data model_path_list = glob.glob( - modpath( + fill_template( + modpath, mip=mip, exp=exp, realm="atmos", @@ -262,9 +261,9 @@ def tree(): run = reference_data_name else: if realization in ["all", "All", "ALL", "*"]: - run_index = re.split( - ". |_", param.modpath.split("/")[-1] - ).index("%(realization)") + run_index = re.split(". |_", modpath.split("/")[-1]).index( + "%(realization)" + ) run = re.split(". 
|_", model_path.split("/")[-1])[run_index] else: run = realization @@ -299,7 +298,7 @@ def tree(): syear, eyear, segmentLength, - outdir, + dir_paths, season=season, ) @@ -335,7 +334,11 @@ def tree(): ] ) mjo_metrics_to_json( - outdir, json_filename_tmp, result_dict, model=model, run=run + dir_paths["metrics_results"], + json_filename_tmp, + result_dict, + model=model, + run=run, ) # ================================================= # Write dictionary to json file @@ -343,7 +346,7 @@ def tree(): # ------------------------------------------------- if not parallel: JSON = pcmdi_metrics.io.base.Base( - outdir(output_type="metrics_results"), json_filename + dir_paths["metrics_results"], json_filename ) JSON.write( result_dict, @@ -355,12 +358,14 @@ def tree(): if cmec: JSON.write_cmec(indent=4, separators=(",", ": ")) print("Done") + except Exception as err: if debug: raise else: print("warning: failed for ", model, run, err) pass + # --- Realization loop end except Exception as err: @@ -369,6 +374,7 @@ def tree(): else: print("warning: failed for ", model, err) pass + # --- Model loop end sys.exit(0) diff --git a/pcmdi_metrics/mjo/scripts/parallel_driver.py b/pcmdi_metrics/mjo/scripts/parallel_driver.py index a0548614d..995a29150 100755 --- a/pcmdi_metrics/mjo/scripts/parallel_driver.py +++ b/pcmdi_metrics/mjo/scripts/parallel_driver.py @@ -6,11 +6,10 @@ import os from argparse import RawTextHelpFormatter -from genutil import StringConstructor - from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPParser from pcmdi_metrics.misc.scripts import parallel_submitter from pcmdi_metrics.mjo.lib import AddParserArgument +from pcmdi_metrics.utils import StringConstructor from pcmdi_metrics.variability_mode.lib import sort_human # ================================================= diff --git a/pcmdi_metrics/mjo/scripts/post_process_merge_jsons.py b/pcmdi_metrics/mjo/scripts/post_process_merge_jsons.py index e8e0a9c49..45e62bddf 100755 --- a/pcmdi_metrics/mjo/scripts/post_process_merge_jsons.py +++ b/pcmdi_metrics/mjo/scripts/post_process_merge_jsons.py @@ -1,15 +1,12 @@ #!/usr/bin/env python -from __future__ import print_function - import copy import glob import json import os -from genutil import StringConstructor - from pcmdi_metrics.mjo.lib import dict_merge +from pcmdi_metrics.utils import StringConstructor def main(): diff --git a/pcmdi_metrics/monsoon_wang/monsoon_wang_driver.py b/pcmdi_metrics/monsoon_wang/monsoon_wang_driver.py index 688cb763c..1fed57ec8 100644 --- a/pcmdi_metrics/monsoon_wang/monsoon_wang_driver.py +++ b/pcmdi_metrics/monsoon_wang/monsoon_wang_driver.py @@ -4,7 +4,6 @@ import os import cdms2 -import genutil import numpy from genutil import statistics @@ -12,6 +11,7 @@ from pcmdi_metrics import resources from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPParser from pcmdi_metrics.monsoon_wang import mpd, mpi_skill_scores +from pcmdi_metrics.utils import StringConstructor def create_monsoon_wang_parser(): @@ -78,7 +78,7 @@ def create_monsoon_wang_parser(): def monsoon_wang_runner(args): # args = P.parse_args(sys.argv[1:]) - modpath = genutil.StringConstructor(args.test_data_path) + modpath = StringConstructor(args.test_data_path) modpath.variable = args.modvar outpathdata = args.results_dir if isinstance(args.modnames, str): diff --git a/pcmdi_metrics/precip_distribution/precip_distribution_driver.py b/pcmdi_metrics/precip_distribution/precip_distribution_driver.py index 2f2ba2175..3ad63b92d 100644 --- a/pcmdi_metrics/precip_distribution/precip_distribution_driver.py 
+++ b/pcmdi_metrics/precip_distribution/precip_distribution_driver.py @@ -6,7 +6,6 @@ import MV2 as MV import xarray as xr -from genutil import StringConstructor from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPParser from pcmdi_metrics.precip_distribution.lib import ( @@ -15,6 +14,7 @@ precip_distribution_cum, precip_distribution_frq_amt, ) +from pcmdi_metrics.utils import StringConstructor # Read parameters P = PMPParser() diff --git a/pcmdi_metrics/utils/__init__.py b/pcmdi_metrics/utils/__init__.py index 1ea54dc9a..013c182f6 100644 --- a/pcmdi_metrics/utils/__init__.py +++ b/pcmdi_metrics/utils/__init__.py @@ -1,3 +1,5 @@ from .create_land_sea_mask import apply_landmask, create_land_sea_mask from .create_target_grid import create_target_grid from .sort_human import sort_human +from .string_constructor import StringConstructor, fill_template +from .tree_dict import tree diff --git a/pcmdi_metrics/utils/string_constructor.py b/pcmdi_metrics/utils/string_constructor.py new file mode 100644 index 000000000..1c49d69ba --- /dev/null +++ b/pcmdi_metrics/utils/string_constructor.py @@ -0,0 +1,99 @@ +import warnings + + +class StringConstructor: + """ + This class aims at spotting keywords in a string and replacing them. + """ + + def __init__(self, template=None): + """ + Instantiates a StringConstructor object. + """ + self.template = template + # Generate the keys and set them to empty + keys = self.keys() + for k in keys: + setattr(self, k, "") + + def keys(self, template=None): + if template is None: + template = self.template + if template is None: + return [] + # Determine the keywords in the template + keys = [] + template_split = template.split("%(")[1:] + if len(template_split) > 0: + for k in template_split: + sp = k.split(")") + if sp[0] not in keys: + keys.append(sp[0]) + return keys + + def construct(self, template=None, **kw): + """ + Accepts a string with an unlimited number of keywords to replace. + """ + if template is None: + template = self.template + # Replace the keywords with their values + for k in self.keys(): + if k not in kw: + warnings.warn(f"Keyword '{k}' not provided for filling the template.") + template = template.replace("%(" + k + ")", kw.get(k, getattr(self, k, ""))) + return template + + def reverse(self, name, debug=False): + """ + The reverse function attempts to take a template and derive its keyword values based on name parameter. + """ + out = {} + template = self.template + for k in self.keys(): + sp = template.split("%%(%s)" % k) + i1 = name.find(sp[0]) + len(sp[0]) + j1 = sp[1].find("%(") + if j1 == -1: + if sp[1] == "": + val = name[i1:] + else: + i2 = name.find(sp[1]) + val = name[i1:i2] + else: + i2 = name[i1:].find(sp[1][:j1]) + val = name[i1 : i1 + i2] + template = template.replace("%%(%s)" % k, val) + out[k] = val + if self.construct(self.template, **out) != name: + raise ValueError("Invalid pattern sent") + return out + + def __call__(self, *args, **kw): + """default call is construct function""" + return self.construct(*args, **kw) + + +def fill_template(template: str, **kwargs) -> str: + """ + Fill in a template string with keyword values. + + Parameters + ---------- + - template (str): The template string containing keywords of the form '%(keyword)'. + - kwargs (dict): Keyword arguments with values to replace in the template. + + Returns + ------- + - str: The filled-in string with replaced keywords. 
+ + Examples + -------- + >>> from pcmdi_metrics.utils import fill_template + >>> template = "This is a %(adjective) %(noun) that %(verb)." + >>> filled_string = fill_template(template, adjective="great", noun="example", verb="works") + >>> print(filled_string) # It will print "This is a great example that works." + """ + filler = StringConstructor(template) + filled_template = filler.construct(**kwargs) + return filled_template diff --git a/pcmdi_metrics/utils/tree_dict.py b/pcmdi_metrics/utils/tree_dict.py new file mode 100644 index 000000000..ce3da6c54 --- /dev/null +++ b/pcmdi_metrics/utils/tree_dict.py @@ -0,0 +1,18 @@ +from collections import defaultdict + + +def tree(): + """ + Create a nested defaultdict with itself as the factory. + + Returns: + - defaultdict: A nested defaultdict with a default factory of tree itself. + + Examples + -------- + >>> from pcmdi_metrics.utils import tree + >>> my_tree = tree() + >>> my_tree['level1']['level2']['level3'] = 'value' + >>> print(my_tree['level1']['level2']['level3']) # Output: 'value' + """ + return defaultdict(tree) diff --git a/pcmdi_metrics/utils/utils.py b/pcmdi_metrics/utils/utils.py new file mode 100644 index 000000000..1a20a9029 --- /dev/null +++ b/pcmdi_metrics/utils/utils.py @@ -0,0 +1,75 @@ +import xarray as xr +import xcdat as xc + + +def get_axis_list(ds: xr.Dataset) -> list[str]: + axes = list(ds.coords.keys()) + return axes + + +def get_longitude(ds: xr.Dataset) -> xr.DataArray: + key_lon = xc.axis.get_dim_keys(ds, axis="X") + lon = ds[key_lon] + return lon + + +def get_latitude(ds: xr.Dataset) -> xr.DataArray: + key_lat = xc.axis.get_dim_keys(ds, axis="Y") + lat = ds[key_lat] + return lat + + +def select_subset( + ds: xr.Dataset, lat: tuple = None, lon: tuple = None, time: tuple = None +) -> xr.Dataset: + """_summary_ + + Parameters + ---------- + ds : xr.Dataset + _description_ + lat : tuple, optional + _description_, by default None + lon : tuple, optional + _description_, by default None + time : tuple, optional + _description_, by default None + + Returns + ------- + xr.Dataset + _description_ + + Examples + --------- + Import: + + >>> from pcmdi_metrics.utils import select_subset + + Spatial subsetting: + + >>> (lat1, lat2) = (30, 50) + >>> (lon1, lon2) = (110, 130) + >>> ds_subset = select_subset(ds, lat=(lat1, lat2), lon=(lon1, lon2)) + + Temporal subsetting: + + >>> import cftime + >>> time1 = cftime.DatetimeProlepticGregorian(1850, 1, 16, 12, 0, 0, 0) + >>> time2 = cftime.DatetimeProlepticGregorian(1851, 1, 16, 12, 0, 0, 0) + >>> ds_subset = select_subset(ds, time=(time1, time2)) + """ + + sel_keys = {} + if lat is not None: + lat_key = xc.axis.get_dim_keys(ds, axis="Y") + sel_keys[lat_key] = slice(*lat) + if lon is not None: + lon_key = xc.axis.get_dim_keys(ds, axis="X") + sel_keys[lon_key] = slice(*lon) + if time is not None: + time_key = xc.axis.get_dim_keys(ds, axis="T") + sel_keys[time_key] = slice(*time) + + ds = ds.sel(**sel_keys) + return ds diff --git a/pcmdi_metrics/variability_mode/lib/lib_variability_mode.py b/pcmdi_metrics/variability_mode/lib/lib_variability_mode.py index 1c44c7a95..e3b76d0da 100644 --- a/pcmdi_metrics/variability_mode/lib/lib_variability_mode.py +++ b/pcmdi_metrics/variability_mode/lib/lib_variability_mode.py @@ -161,9 +161,7 @@ def variability_metrics_to_json( outdir, json_filename, result_dict, model=None, run=None, cmec_flag=False ): # Open JSON - JSON = pcmdi_metrics.io.base.Base( - outdir(output_type="metrics_results"), json_filename - ) + JSON = 
pcmdi_metrics.io.base.Base(outdir, json_filename) # Dict for JSON json_dict = copy.deepcopy(result_dict) if model is not None or run is not None: diff --git a/pcmdi_metrics/variability_mode/scripts_pcmdi/parallel_driver.py b/pcmdi_metrics/variability_mode/scripts_pcmdi/parallel_driver.py index e20fd81f6..e4ac38d30 100755 --- a/pcmdi_metrics/variability_mode/scripts_pcmdi/parallel_driver.py +++ b/pcmdi_metrics/variability_mode/scripts_pcmdi/parallel_driver.py @@ -6,10 +6,9 @@ import os from argparse import RawTextHelpFormatter -from genutil import StringConstructor - from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPParser from pcmdi_metrics.misc.scripts import parallel_submitter +from pcmdi_metrics.utils import StringConstructor from pcmdi_metrics.variability_mode.lib import ( AddParserArgument, VariabilityModeCheck, diff --git a/pcmdi_metrics/variability_mode/scripts_pcmdi/post_process_merge_jsons.py b/pcmdi_metrics/variability_mode/scripts_pcmdi/post_process_merge_jsons.py index 85ba1bb1b..4239c135f 100755 --- a/pcmdi_metrics/variability_mode/scripts_pcmdi/post_process_merge_jsons.py +++ b/pcmdi_metrics/variability_mode/scripts_pcmdi/post_process_merge_jsons.py @@ -7,8 +7,7 @@ import json import os -from genutil import StringConstructor - +from pcmdi_metrics.utils import StringConstructor from pcmdi_metrics.variability_mode.lib import dict_merge diff --git a/pcmdi_metrics/variability_mode/variability_modes_driver.py b/pcmdi_metrics/variability_mode/variability_modes_driver.py index be1bff4b0..b5d6e0e17 100755 --- a/pcmdi_metrics/variability_mode/variability_modes_driver.py +++ b/pcmdi_metrics/variability_mode/variability_modes_driver.py @@ -50,6 +50,7 @@ import glob import json import os +import re import sys from argparse import RawTextHelpFormatter from shutil import copyfile @@ -57,10 +58,10 @@ import cdtime import cdutil import MV2 -from genutil import StringConstructor from pcmdi_metrics import resources from pcmdi_metrics.mean_climate.lib import pmp_parser +from pcmdi_metrics.utils import fill_template, sort_human, tree from pcmdi_metrics.variability_mode.lib import ( AddParserArgument, VariabilityModeCheck, @@ -77,8 +78,6 @@ linear_regression_on_globe_for_teleconnection, plot_map, read_data_in, - sort_human, - tree, variability_metrics_to_json, write_nc_output, ) @@ -166,20 +165,22 @@ obs_var = param.varOBS # Path to model data as string template -modpath = StringConstructor(param.modpath) +modpath = param.modpath if LandMask: - modpath_lf = StringConstructor(param.modpath_lf) + modpath_lf = param.modpath_lf # Check given model option models = param.modnames # Include all models if conditioned if ("all" in [m.lower() for m in models]) or (models == "all"): - model_index_path = param.modpath.split("/")[-1].split(".").index("%(model)") + model_index_path = re.split(". |_", modpath.split("/")[-1]).index("%(model)") models = [ - p.split("/")[-1].split(".")[model_index_path] + re.split(". 
|_", p.split("/")[-1])[model_index_path] for p in glob.glob( - modpath(mip=mip, exp=exp, model="*", realization="*", variable=var) + fill_template( + modpath, mip=mip, exp=exp, model="*", realization="*", variable=var + ) ) ] # remove duplicates @@ -199,21 +200,6 @@ # case id case_id = param.case_id -# Output -outdir_template = param.process_templated_argument("results_dir") -outdir = StringConstructor( - str( - outdir_template( - output_type="%(output_type)", - mip=mip, - exp=exp, - variability_mode=mode, - reference_data_name=obs_name, - case_id=case_id, - ) - ) -) - # Debug debug = param.debug @@ -230,13 +216,13 @@ ObsUnitsAdjust = param.ObsUnitsAdjust ModUnitsAdjust = param.ModUnitsAdjust -# lon1g and lon2g is for global map plotting +# lon1_global and lon2_global is for global map plotting if mode in ["PDO", "NPGO"]: - lon1g = 0 - lon2g = 360 + lon1_global = 0 + lon2_global = 360 else: - lon1g = -180 - lon2g = 180 + lon1_global = -180 + lon2_global = 180 # parallel parallel = param.parallel @@ -265,9 +251,26 @@ # ================================================= # Create output directories # ------------------------------------------------- -for output_type in ["graphics", "diagnostic_results", "metrics_results"]: - os.makedirs(outdir(output_type=output_type), exist_ok=True) - print(outdir(output_type=output_type)) +outdir_template = param.results_dir + +output_types = ["graphics", "diagnostic_results", "metrics_results"] +dir_paths = {} + +print("output directories:") + +for output_type in output_types: + dir_path = fill_template( + outdir_template, + output_type=output_type, + mip=mip, + exp=exp, + variability_mode=mode, + reference_data_name=obs_name, + case_id=case_id, + ) + os.makedirs(dir_path, exist_ok=True) + print(output_type, ":", dir_path) + dir_paths[output_type] = dir_path # ================================================= # Set dictionary for .json record @@ -289,10 +292,10 @@ str(msyear) + "-" + str(meyear), ] ) +json_file = os.path.join(dir_paths["metrics_results"], json_filename + ".json") -json_file = os.path.join(outdir(output_type="metrics_results"), json_filename + ".json") json_file_org = os.path.join( - outdir(output_type="metrics_results"), + dir_paths["metrics_results"], "_".join([json_filename, "org", str(os.getpid())]) + ".json", ) @@ -439,7 +442,7 @@ if plot_obs: debug_print("plot obs", debug) output_img_file_obs = os.path.join( - outdir(output_type="graphics"), output_filename_obs + dir_paths["graphics"], output_filename_obs ) plot_map( mode, @@ -458,7 +461,7 @@ osyear, oeyear, season, - eof_lr_obs[season](longitude=(lon1g, lon2g)), + eof_lr_obs[season](longitude=(lon1_global, lon2_global)), frac_obs[season], output_img_file_obs + "_teleconnection", debug=debug, @@ -466,10 +469,11 @@ debug_print("obs plotting end", debug) # NetCDF: Save global map, pc timeseries, and fraction in NetCDF output + if nc_out_obs: debug_print("write obs nc", debug) output_nc_file_obs = os.path.join( - outdir(output_type="diagnostic_results"), output_filename_obs + dir_paths["diagnostic_results"], output_filename_obs ) write_nc_output( output_nc_file_obs, @@ -508,7 +512,14 @@ result_dict["RESULTS"][model] = {} model_path_list = glob.glob( - modpath(mip=mip, exp=exp, model=model, realization=realization, variable=var) + fill_template( + modpath, + mip=mip, + exp=exp, + model=model, + realization=realization, + variable=var, + ) ) model_path_list = sort_human(model_path_list) @@ -518,8 +529,13 @@ # Find where run can be gripped from given filename template for modpath if 
realization == "*": run_in_modpath = ( - modpath( - mip=mip, exp=exp, model=model, realization=realization, variable=var + fill_template( + modpath, + mip=mip, + exp=exp, + model=model, + realization=realization, + variable=var, ) .split("/")[-1] .split(".") @@ -530,7 +546,8 @@ # Run # ------------------------------------------------- for model_path in model_path_list: - try: + # try: + if 1: if realization == "*": run = (model_path.split("/")[-1]).split(".")[run_in_modpath] else: @@ -552,7 +569,7 @@ ] = eofn_mod if LandMask: - model_lf_path = modpath_lf(mip=mip, exp=exp, model=model) + model_lf_path = fill_template(modpath_lf, mip=mip, exp=exp, model=model) else: model_lf_path = None @@ -730,7 +747,7 @@ # Diagnostics results -- data to NetCDF # Save global map, pc timeseries, and fraction in NetCDF output output_nc_file = os.path.join( - outdir(output_type="diagnostic_results"), output_filename + dir_paths["diagnostic_results"], output_filename ) if nc_out_model: write_nc_output( @@ -744,7 +761,7 @@ # Graphics -- plot map image to PNG output_img_file = os.path.join( - outdir(output_type="graphics"), output_filename + dir_paths["graphics"], output_filename ) if plot_model: plot_map( @@ -764,7 +781,7 @@ msyear, meyear, season, - eof_lr_cbf(longitude=(lon1g, lon2g)), + eof_lr_cbf(longitude=(lon1_global, lon2_global)), frac_cbf, output_img_file + "_cbf_teleconnection", debug=debug, @@ -904,7 +921,7 @@ # Diagnostics results -- data to NetCDF # Save global map, pc timeseries, and fraction in NetCDF output output_nc_file = os.path.join( - outdir(output_type="diagnostic_results"), output_filename + dir_paths["diagnostic_results"], output_filename ) if nc_out_model: write_nc_output( @@ -913,7 +930,7 @@ # Graphics -- plot map image to PNG output_img_file = os.path.join( - outdir(output_type="graphics"), output_filename + dir_paths["graphics"], output_filename ) if plot_model: plot_map( @@ -945,7 +962,7 @@ msyear, meyear, season, - eof_lr(longitude=(lon1g, lon2g)), + eof_lr(longitude=(lon1_global, lon2_global)), frac, output_img_file + "_teleconnection", debug=debug, @@ -998,21 +1015,21 @@ ] ) variability_metrics_to_json( - outdir, + dir_paths["metrics_results"], json_filename_tmp, result_dict, model=model, run=run, cmec_flag=cmec, ) - + """ except Exception as err: if debug: raise else: print("warning: failed for ", model, run, err) pass - + """ # ======================================================================== # Dictionary to JSON: collective JSON at the end of model_realization loop # ------------------------------------------------------------------------ @@ -1033,7 +1050,9 @@ str(msyear) + "-" + str(meyear), ] ) - variability_metrics_to_json(outdir, json_filename_all, result_dict, cmec_flag=cmec) + variability_metrics_to_json( + dir_paths["metrics_results"], json_filename_all, result_dict, cmec_flag=cmec + ) if not debug: sys.exit(0)