diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml index 6cd2f2dab..fd7148155 100644 --- a/.github/workflows/build_workflow.yml +++ b/.github/workflows/build_workflow.yml @@ -25,21 +25,27 @@ jobs: needs: check-jobs-to-skip if: ${{ needs.check-jobs-to-skip.outputs.should_skip != 'true'}} || ${{ github.event_name == 'push' }} runs-on: ubuntu-latest - timeout-minutes: 2 + timeout-minutes: 10 steps: + - uses: actions/setup-node@v3 + with: + node-version: '16' - name: Checkout Code Repository uses: actions/checkout@v2 - + with: + node-version: '16' - name: Set up Python 3.9 uses: actions/setup-python@v2 with: + node-version: '16' python-version: 3.9 - # Run all pre-commit hooks on all the files. # Getting only staged files can be tricky in case a new PR is opened # since the action is run on a branch in detached head state - name: Install and Run Pre-commit uses: pre-commit/action@v2.0.3 + with: + node-version: '16' build: needs: check-jobs-to-skip @@ -50,14 +56,17 @@ jobs: shell: bash -l {0} timeout-minutes: 10 steps: + - uses: actions/setup-node@v3 + with: + node-version: '16' - uses: actions/checkout@v2 - - name: Cache Conda uses: actions/cache@v2 env: # Increase this value to reset cache if conda/dev.yml has not changed in the workflow CACHE_NUMBER: 0 with: + node-version: '16' path: ~/conda_pkgs_dir key: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-publish diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 173641f63..dd8e8b451 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,7 +4,7 @@ fail_fast: true repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.3.0 + rev: v4.4.0 hooks: - id: trailing-whitespace args: [--markdown-linebreak-ext=md] @@ -12,7 +12,7 @@ repos: - id: check-yaml - repo: https://github.com/psf/black - rev: 22.10.0 + rev: 23.1.0 hooks: - id: black @@ -24,7 +24,7 @@ repos: # Need to use flake8 GitHub mirror due to CentOS git issue with GitLab # https://github.com/pre-commit/pre-commit/issues/1206 - repo: https://github.com/pycqa/flake8 - rev: 5.0.4 + rev: 6.0.0 hooks: - id: flake8 args: ["--config=setup.cfg"] diff --git a/cmec/scripts/pmp_param_generator.py b/cmec/scripts/pmp_param_generator.py index 1161d54b8..b7a435942 100644 --- a/cmec/scripts/pmp_param_generator.py +++ b/cmec/scripts/pmp_param_generator.py @@ -24,7 +24,6 @@ def check_for_opt(key, settings): if __name__ == "__main__": - config_json = sys.argv[1] out_file_name = sys.argv[2] pmp_config = sys.argv[3] diff --git a/conda-env/dev.yml b/conda-env/dev.yml index c068dd776..c8090fc75 100644 --- a/conda-env/dev.yml +++ b/conda-env/dev.yml @@ -19,11 +19,13 @@ dependencies: - eofs=1.4.0 - seaborn=0.11.1 - enso_metrics=1.1.1 + - xcdat=0.4.0 + - xmltodict=0.13.0 + - setuptools=65.5.0 - netcdf4=1.6.0 - regionmask=0.9.0 - rasterio=1.2.10 - shapely=1.8.0 - - xcdat=0.4.0 # Testing # ================== - pre_commit=2.20.0 diff --git a/doc/jupyter/Demo/Demo_1b_mean_climate.ipynb b/doc/jupyter/Demo/Demo_1b_mean_climate.ipynb index c82af02ac..76c1b948a 100644 --- a/doc/jupyter/Demo/Demo_1b_mean_climate.ipynb +++ b/doc/jupyter/Demo/Demo_1b_mean_climate.ipynb @@ -40,7 +40,8 @@ "cell_type": "code", "execution_count": 2, "metadata": { - "scrolled": true + "scrolled": true, + "tags": [] }, "outputs": [ { @@ -126,34 +127,156 @@ "cell_type": "code", "execution_count": 3, "metadata": { - "scrolled": true + "scrolled": true, + "tags": [] }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "INFO::2021-11-10 
17:03::pcmdi_metrics::basicTest:: REGION: Global\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: alternate1 is an obs\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: CanCM4 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: default is an obs\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: CanCM4 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::basicTest:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_linear_metrics.json\n" + "2023-02-21 14:17:32,585 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:18::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:18:04,318 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:18:04,348 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:18::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:18:32,211 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:18:34,137 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:19::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:19:02,987 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:19:03,017 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:19::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:19:34,352 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "INFO::2023-02-21 14:19::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_metrics.json\n", + "2023-02-21 14:19:34,357 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/basicTest/rlut_2.5x2.5_regrid2_metrics.json\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "case_id: basicTest \n", + " test_data_set: ['ACCESS1-0', 'CanCM4'] \n", + " realization: \n", + " vars: ['rlut'] \n", + " reference_data_set: ['all'] \n", + " target_grid: 2.5x2.5 \n", + " regrid_tool: regrid2 \n", + " regrid_tool_ocn: esmf \n", + " save_test_clims: False \n", + " test_clims_interpolated_output: None \n", + " filename_template: cmip5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20200426.nc \n", + " sftlf_filename_template: sftlf_%(model_version).nc \n", + " generate_sftlf: True \n", + " regions_specs: {'NHEX': {'domain': {'latitude': (30.0, 90)}}, 'SHEX': {'domain': {'latitude': (-90.0, -30)}}, 'TROPICS': {'domain': {'latitude': (-30.0, 30)}}, 'global': {}, '90S50S': {'domain': {'latitude': (-90.0, -50)}}, '50S20S': {'domain': {'latitude': (-50.0, -20)}}, '20S20N': {'domain': {'latitude': (-20.0, 20)}}, '20N50N': {'domain': {'latitude': (20.0, 50)}}, '50N90N': {'domain': {'latitude': (50.0, 90)}}, 'land_NHEX': {'value': 100, 'domain': {'latitude': (30.0, 90)}}, 'land_SHEX': {'value': 100, 'domain': {'latitude': (-90.0, -30)}}, 'land_TROPICS': {'value': 100, 'domain': {'latitude': (-30.0, 30)}}, 'land': {'value': 100}, 'ocean_NHEX': {'value': 0, 'domain': {'latitude': (30.0, 90)}}, 'ocean_SHEX': {'value': 0, 'domain': {'latitude': (-90.0, -30)}}, 'ocean_TROPICS': {'value': 0, 'domain': {'latitude': (30.0, 30)}}, 'ocean': {'value': 0}, 'CONUS': {'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'land_CONUS': {'value': 100, 'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'NAM': {'domain': {'latitude': (20.0, 90), 'longitude': (-180, 180)}}, 'NAO': {'domain': {'latitude': (20.0, 80), 'longitude': (-90, 40)}}, 'SAM': {'domain': {'latitude': (-20.0, -90), 'longitude': (0, 360)}}, 'PNA': {'domain': {'latitude': (20.0, 85), 'longitude': (120, 240)}}, 'PDO': {'domain': {'latitude': (20.0, 70), 'longitude': (110, 260)}}, 'AllMW': {'domain': {'latitude': (-40.0, 45.0), 'longitude': (0.0, 360.0)}}, 'AllM': {'domain': {'latitude': (-45.0, 45.0), 'longitude': (0.0, 360.0)}}, 'NAMM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (210.0, 310.0)}}, 'SAMM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (240.0, 330.0)}}, 'NAFM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (310.0, 60.0)}}, 'SAFM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (0.0, 90.0)}}, 'ASM': 
{'domain': {'latitude': (0.0, 45.0), 'longitude': (60.0, 180.0)}}, 'AUSM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (90.0, 160.0)}}, 'AIR': {'domain': {'latitude': (7.0, 25.0), 'longitude': (65.0, 85.0)}}, 'AUS': {'domain': {'latitude': (-20.0, -10.0), 'longitude': (120.0, 150.0)}}, 'Sahel': {'domain': {'latitude': (13.0, 18.0), 'longitude': (-10.0, 10.0)}}, 'GoG': {'domain': {'latitude': (0.0, 5.0), 'longitude': (-10.0, 10.0)}}, 'NAmo': {'domain': {'latitude': (20.0, 37.0), 'longitude': (-112.0, -103.0)}}, 'SAmo': {'domain': {'latitude': (-20.0, 2.5), 'longitude': (-65.0, -40.0)}}} \n", + " regions: {'rlut': ['Global']} \n", + " test_data_path: demo_data/CMIP5_demo_clims/ \n", + " reference_data_path: demo_data/obs4MIPs_PCMDI_clims \n", + " metrics_output_path: demo_output/basicTest \n", + " debug: False \n", + "\n", + "--- prepare mean climate metrics calculation ---\n", + "--- start mean climate metrics calculation ---\n", + "varname: rlut\n", + "level: None\n", + "reference_data_set (all): ['alternate1', 'default']\n", + "ref: alternate1\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-0/v20210804/rlut_mon_CERES-EBAF-4-0_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "ref: default\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-1/v20210804/rlut_mon_CERES-EBAF-4-1_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "pmp mean clim driver completed\n" ] } ], @@ -172,7 +295,10 @@ { "cell_type": "code", "execution_count": 4, - "metadata": {}, + "metadata": { + "scrolled": true, + "tags": [] + }, "outputs": [ { "name": "stdout", @@ -180,27 +306,11 @@ "text": [ "{\n", " \"ACCESS1-0\": {\n", - " \"units\": \"W m-2\",\n", - " \"SimulationDescription\": {\n", - " \"MIPTable\": \"Amon\",\n", - " \"Model\": \"ACCESS1-0\",\n", - " \"ModelActivity\": \"CMIP5\",\n", - " \"ModellingGroup\": \"CSIRO-BOM\",\n", - " \"Experiment\": \"historical\",\n", - " \"ModelFreeSpace\": \"N/A\",\n", - " \"Realization\": \"\",\n", - " \"creation_date\": \"2012-01-15T12:34:39Z\"\n", - " },\n", - " \"InputClimatologyFileName\": \"cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\",\n", - " \"InputClimatologyMD5\": \"16fb29fa02cc8c68e170502bca145640\",\n", - " \"InputRegionFileName\": null,\n", - " \"InputRegionMD5\": null,\n", " \"alternate1\": {\n", - " \"source\": \"CERES-EBAF-4-0\",\n", " \"\": {\n", " \"Global\": {\n", " \"bias_xy\": {\n", - " \"ann\": \"1.124\",\n", + " \"ann\": \"1.138\",\n", " \"djf\": \"1.675\",\n", " \"mam\": \"1.392\",\n", " \"jja\": \"0.859\",\n", @@ -242,7 +352,7 @@ " ]\n", " },\n", " \"mae_xy\": {\n", - " \"ann\": \"5.784\",\n", + " \"ann\": \"5.770\",\n", " \"djf\": \"7.158\",\n", " \"mam\": \"7.246\",\n", " \"jja\": \"7.512\",\n", @@ -263,7 +373,7 @@ " ]\n", " },\n", " \"mean-obs_xy\": {\n", - " \"ann\": \"240.331\",\n", + " \"ann\": \"240.317\",\n", " \"djf\": \"237.540\",\n", " \"mam\": \"239.327\",\n", " \"jja\": \"243.879\",\n", @@ -305,10 +415,10 @@ " ]\n", " },\n", " \"rms_devzm\": {\n", - " \"ann\": \"5.805\"\n", + " \"ann\": \"5.808\"\n", " },\n", " \"rms_xy\": {\n", - " \"ann\": \"8.062\",\n", + " \"ann\": \"8.043\",\n", " \"djf\": \"10.231\",\n", " \"mam\": \"10.774\",\n", " \"jja\": \"10.439\",\n", @@ -329,13 +439,13 @@ " ]\n", " },\n", " \"rms_xyt\": {\n", - " \"ann\": \"11.457\"\n", + " \"ann\": \"11.443\"\n", " },\n", " \"rms_y\": {\n", - " \"ann\": \"4.833\"\n", + " \"ann\": \"5.565\"\n", " },\n", " \"rmsc_xy\": {\n", - " \"ann\": \"7.983\",\n", + " \"ann\": \"7.962\",\n", " \"djf\": \"10.093\",\n", " \"mam\": \"10.684\",\n", " \"jja\": \"10.404\",\n", @@ -356,7 +466,7 @@ " ]\n", " },\n", " \"std-obs_xy\": {\n", - " \"ann\": \"29.645\",\n", + " \"ann\": \"29.642\",\n", " \"djf\": \"32.679\",\n", " \"mam\": \"30.811\",\n", " \"jja\": \"35.368\",\n", @@ -377,10 +487,10 @@ " ]\n", " },\n", " \"std-obs_xy_devzm\": {\n", - " \"ann\": \"12.980\"\n", + " \"ann\": \"12.977\"\n", " },\n", " \"std-obs_xyt\": {\n", - " \"ann\": \"33.463\"\n", + " \"ann\": 
\"33.461\"\n", " },\n", " \"std_xy\": {\n", " \"ann\": \"31.968\",\n", @@ -413,11 +523,10 @@ " }\n", " },\n", " \"default\": {\n", - " \"source\": \"CERES-EBAF-4-1\",\n", " \"\": {\n", " \"Global\": {\n", " \"bias_xy\": {\n", - " \"ann\": \"1.122\",\n", + " \"ann\": \"1.137\",\n", " \"djf\": \"1.644\",\n", " \"mam\": \"1.325\",\n", " \"jja\": \"0.866\",\n", @@ -459,7 +568,7 @@ " ]\n", " },\n", " \"mae_xy\": {\n", - " \"ann\": \"5.777\",\n", + " \"ann\": \"5.763\",\n", " \"djf\": \"7.165\",\n", " \"mam\": \"7.307\",\n", " \"jja\": \"7.555\",\n", @@ -480,7 +589,7 @@ " ]\n", " },\n", " \"mean-obs_xy\": {\n", - " \"ann\": \"240.333\",\n", + " \"ann\": \"240.318\",\n", " \"djf\": \"237.570\",\n", " \"mam\": \"239.394\",\n", " \"jja\": \"243.872\",\n", @@ -522,10 +631,10 @@ " ]\n", " },\n", " \"rms_devzm\": {\n", - " \"ann\": \"5.804\"\n", + " \"ann\": \"5.807\"\n", " },\n", " \"rms_xy\": {\n", - " \"ann\": \"8.050\",\n", + " \"ann\": \"8.033\",\n", " \"djf\": \"10.240\",\n", " \"mam\": \"10.871\",\n", " \"jja\": \"10.484\",\n", @@ -546,13 +655,13 @@ " ]\n", " },\n", " \"rms_xyt\": {\n", - " \"ann\": \"11.457\"\n", + " \"ann\": \"11.441\"\n", " },\n", " \"rms_y\": {\n", - " \"ann\": \"4.819\"\n", + " \"ann\": \"5.549\"\n", " },\n", " \"rmsc_xy\": {\n", - " \"ann\": \"7.972\",\n", + " \"ann\": \"7.952\",\n", " \"djf\": \"10.107\",\n", " \"mam\": \"10.790\",\n", " \"jja\": \"10.449\",\n", @@ -573,7 +682,7 @@ " ]\n", " },\n", " \"std-obs_xy\": {\n", - " \"ann\": \"29.642\",\n", + " \"ann\": \"29.638\",\n", " \"djf\": \"32.730\",\n", " \"mam\": \"30.769\",\n", " \"jja\": \"35.354\",\n", @@ -594,10 +703,10 @@ " ]\n", " },\n", " \"std-obs_xy_devzm\": {\n", - " \"ann\": \"12.973\"\n", + " \"ann\": \"12.970\"\n", " },\n", " \"std-obs_xyt\": {\n", - " \"ann\": \"33.413\"\n", + " \"ann\": \"33.411\"\n", " },\n", " \"std_xy\": {\n", " \"ann\": \"31.968\",\n", @@ -631,27 +740,11 @@ " }\n", " },\n", " \"CanCM4\": {\n", - " \"units\": \"W m-2\",\n", - " \"SimulationDescription\": {\n", - " \"MIPTable\": \"Amon\",\n", - " \"Model\": \"CanCM4\",\n", - " \"ModelActivity\": \"CMIP5\",\n", - " \"ModellingGroup\": \"CCCma\",\n", - " \"Experiment\": \"historical\",\n", - " \"ModelFreeSpace\": \"N/A\",\n", - " \"Realization\": \"\",\n", - " \"creation_date\": \"2012-01-31T22:04:48Z\"\n", - " },\n", - " \"InputClimatologyFileName\": \"cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\",\n", - " \"InputClimatologyMD5\": \"40b2bfa71a3b7d2febb55652ef551001\",\n", - " \"InputRegionFileName\": null,\n", - " \"InputRegionMD5\": null,\n", " \"alternate1\": {\n", - " \"source\": \"CERES-EBAF-4-0\",\n", " \"\": {\n", " \"Global\": {\n", " \"bias_xy\": {\n", - " \"ann\": \"-1.178\",\n", + " \"ann\": \"-1.164\",\n", " \"djf\": \"-0.905\",\n", " \"mam\": \"-0.959\",\n", " \"jja\": \"-1.403\",\n", @@ -693,7 +786,7 @@ " ]\n", " },\n", " \"mae_xy\": {\n", - " \"ann\": \"6.327\",\n", + " \"ann\": \"6.329\",\n", " \"djf\": \"7.489\",\n", " \"mam\": \"8.016\",\n", " \"jja\": \"7.625\",\n", @@ -714,7 +807,7 @@ " ]\n", " },\n", " \"mean-obs_xy\": {\n", - " \"ann\": \"240.331\",\n", + " \"ann\": \"240.317\",\n", " \"djf\": \"237.540\",\n", " \"mam\": \"239.327\",\n", " \"jja\": \"243.879\",\n", @@ -756,10 +849,10 @@ " ]\n", " },\n", " \"rms_devzm\": {\n", - " \"ann\": \"9.351\"\n", + " \"ann\": \"9.364\"\n", " },\n", " \"rms_xy\": {\n", - " \"ann\": \"9.606\",\n", + " \"ann\": \"9.610\",\n", " \"djf\": \"10.947\",\n", " \"mam\": \"11.785\",\n", " \"jja\": \"11.218\",\n", @@ -780,13 +873,13 @@ " ]\n", " },\n", " 
\"rms_xyt\": {\n", - " \"ann\": \"12.595\"\n", + " \"ann\": \"12.576\"\n", " },\n", " \"rms_y\": {\n", - " \"ann\": \"2.472\"\n", + " \"ann\": \"2.161\"\n", " },\n", " \"rmsc_xy\": {\n", - " \"ann\": \"9.533\",\n", + " \"ann\": \"9.539\",\n", " \"djf\": \"10.910\",\n", " \"mam\": \"11.746\",\n", " \"jja\": \"11.130\",\n", @@ -807,7 +900,7 @@ " ]\n", " },\n", " \"std-obs_xy\": {\n", - " \"ann\": \"29.645\",\n", + " \"ann\": \"29.642\",\n", " \"djf\": \"32.679\",\n", " \"mam\": \"30.811\",\n", " \"jja\": \"35.368\",\n", @@ -828,10 +921,10 @@ " ]\n", " },\n", " \"std-obs_xy_devzm\": {\n", - " \"ann\": \"12.980\"\n", + " \"ann\": \"12.977\"\n", " },\n", " \"std-obs_xyt\": {\n", - " \"ann\": \"33.463\"\n", + " \"ann\": \"33.461\"\n", " },\n", " \"std_xy\": {\n", " \"ann\": \"31.121\",\n", @@ -864,11 +957,10 @@ " }\n", " },\n", " \"default\": {\n", - " \"source\": \"CERES-EBAF-4-1\",\n", " \"\": {\n", " \"Global\": {\n", " \"bias_xy\": {\n", - " \"ann\": \"-1.180\",\n", + " \"ann\": \"-1.165\",\n", " \"djf\": \"-0.936\",\n", " \"mam\": \"-1.026\",\n", " \"jja\": \"-1.395\",\n", @@ -910,7 +1002,7 @@ " ]\n", " },\n", " \"mae_xy\": {\n", - " \"ann\": \"6.329\",\n", + " \"ann\": \"6.332\",\n", " \"djf\": \"7.484\",\n", " \"mam\": \"8.026\",\n", " \"jja\": \"7.644\",\n", @@ -931,7 +1023,7 @@ " ]\n", " },\n", " \"mean-obs_xy\": {\n", - " \"ann\": \"240.333\",\n", + " \"ann\": \"240.318\",\n", " \"djf\": \"237.570\",\n", " \"mam\": \"239.394\",\n", " \"jja\": \"243.872\",\n", @@ -973,10 +1065,10 @@ " ]\n", " },\n", " \"rms_devzm\": {\n", - " \"ann\": \"9.351\"\n", + " \"ann\": \"9.364\"\n", " },\n", " \"rms_xy\": {\n", - " \"ann\": \"9.603\",\n", + " \"ann\": \"9.608\",\n", " \"djf\": \"10.915\",\n", " \"mam\": \"11.801\",\n", " \"jja\": \"11.246\",\n", @@ -997,13 +1089,13 @@ " ]\n", " },\n", " \"rms_xyt\": {\n", - " \"ann\": \"12.608\"\n", + " \"ann\": \"12.587\"\n", " },\n", " \"rms_y\": {\n", - " \"ann\": \"2.467\"\n", + " \"ann\": \"2.152\"\n", " },\n", " \"rmsc_xy\": {\n", - " \"ann\": \"9.531\",\n", + " \"ann\": \"9.537\",\n", " \"djf\": \"10.875\",\n", " \"mam\": \"11.756\",\n", " \"jja\": \"11.159\",\n", @@ -1024,7 +1116,7 @@ " ]\n", " },\n", " \"std-obs_xy\": {\n", - " \"ann\": \"29.642\",\n", + " \"ann\": \"29.638\",\n", " \"djf\": \"32.730\",\n", " \"mam\": \"30.769\",\n", " \"jja\": \"35.354\",\n", @@ -1045,10 +1137,10 @@ " ]\n", " },\n", " \"std-obs_xy_devzm\": {\n", - " \"ann\": \"12.973\"\n", + " \"ann\": \"12.970\"\n", " },\n", " \"std-obs_xyt\": {\n", - " \"ann\": \"33.413\"\n", + " \"ann\": \"33.411\"\n", " },\n", " \"std_xy\": {\n", " \"ann\": \"31.121\",\n", @@ -1088,7 +1180,7 @@ "source": [ "import json\n", "import os\n", - "output_path = os.path.join(demo_output_directory,\"basicTest/rlut_2.5x2.5_regrid2_linear_metrics.json\")\n", + "output_path = os.path.join(demo_output_directory,\"basicTest/rlut_2.5x2.5_regrid2_metrics.json\")\n", "with open(output_path) as f:\n", " metric = json.load(f)[\"RESULTS\"]\n", "print(json.dumps(metric, indent=2))" @@ -1109,14 +1201,69 @@ " \n", "This next cell demonstrates how to change 'case_id' and 'regrid_tool' on the command line. Changing the 'case_id' is helpful because these results will be stored in a folder with that name, separate from other runs. \n", "\n", - "The two regrid tools available are 'regrid2' and 'esmf'. 'regrid2' is recommended, but 'esmf' must be used with non-rectangular grids. \n", - " \n", - "Both the model data sets and observations are regridded to a 2.5 by 2.5 degree grid before producing statistics. 
To interpolate to a different grid, the user should provide a [cdms2 grid object](https://cdms.readthedocs.io/en/latest/manual/cdms_2.html#id9) as the `target_grid`." + "Both the model data sets and observations are regridded to a 2.5 by 2.5 degree grid before producing statistics. To interpolate to a different grid, the user should provide a [cdms2 grid object](https://cdms.readthedocs.io/en/latest/manual/cdms_2.html#id9) as the `target_grid`.\n", + "\n", + "The two regrid tools available are 'regrid2' and ['xesmf'](https://xesmf.readthedocs.io/en/latest/). 'regrid2' is recommended, but 'xesmf' must be used with non-rectangular grids. " ] }, { "cell_type": "code", "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'\\nimport sys\\n!conda install --yes --prefix {sys.prefix} -c conda-forge xesmf\\n'" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# for conda\n", + "\"\"\"\n", + "import sys\n", + "!conda install --yes --prefix {sys.prefix} -c conda-forge xesmf\n", + "\"\"\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "OR delete the triple quotations on lines 2&5 from this cell to install with pip:" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'\\nimport sys\\n!{sys.executable} -m pip install git+https://github.com/pangeo-data/xesmf.git\\n'" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# for pip\n", + "\"\"\"\n", + "import sys\n", + "!{sys.executable} -m pip install git+https://github.com/pangeo-data/xesmf.git\n", + "\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": 7, "metadata": { "scrolled": true }, @@ -1125,33 +1272,154 @@ "name": "stderr", "output_type": "stream", "text": [ - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: REGION: Global\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: alternate1 is an obs\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_linear_metrics.json\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: CanCM4 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_linear_metrics.json\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: default is an obs\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: TEST 
DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_linear_metrics.json\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: CanCM4 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex2:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_esmf_linear_metrics.json\n" + "2023-02-21 14:19:44,197 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:20::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/ACCESS1-0_rlut_2.5x2.5_xesmf_metrics_alternate1.json\n", + "2023-02-21 14:20:31,985 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/ACCESS1-0_rlut_2.5x2.5_xesmf_metrics_alternate1.json\n", + "2023-02-21 14:20:32,017 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:21::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/CanCM4_rlut_2.5x2.5_xesmf_metrics_alternate1.json\n", + "2023-02-21 14:21:11,672 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/CanCM4_rlut_2.5x2.5_xesmf_metrics_alternate1.json\n", + "2023-02-21 14:21:14,865 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:21::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/ACCESS1-0_rlut_2.5x2.5_xesmf_metrics_default.json\n", + "2023-02-21 14:21:57,643 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/ACCESS1-0_rlut_2.5x2.5_xesmf_metrics_default.json\n", + "2023-02-21 14:21:57,670 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:22::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/CanCM4_rlut_2.5x2.5_xesmf_metrics_default.json\n", + "2023-02-21 14:22:36,898 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut/CanCM4_rlut_2.5x2.5_xesmf_metrics_default.json\n", + "INFO::2023-02-21 14:22::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_xesmf_metrics.json\n", + "2023-02-21 14:22:47,436 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex2/rlut_2.5x2.5_xesmf_metrics.json\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "case_id: Ex2 \n", + " test_data_set: ['ACCESS1-0', 'CanCM4'] \n", + " realization: \n", + " vars: ['rlut'] \n", + " reference_data_set: ['all'] \n", + " target_grid: 2.5x2.5 \n", + " regrid_tool: xesmf \n", + " regrid_tool_ocn: esmf \n", + " save_test_clims: False \n", + " test_clims_interpolated_output: None \n", + " filename_template: cmip5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20200426.nc \n", + " sftlf_filename_template: sftlf_%(model_version).nc \n", + " generate_sftlf: True \n", + " regions_specs: {'NHEX': {'domain': {'latitude': (30.0, 90)}}, 'SHEX': {'domain': {'latitude': (-90.0, -30)}}, 'TROPICS': {'domain': {'latitude': (-30.0, 30)}}, 'global': {}, '90S50S': {'domain': {'latitude': (-90.0, -50)}}, '50S20S': {'domain': {'latitude': (-50.0, -20)}}, '20S20N': {'domain': {'latitude': (-20.0, 20)}}, '20N50N': {'domain': {'latitude': (20.0, 50)}}, '50N90N': {'domain': {'latitude': (50.0, 90)}}, 'land_NHEX': {'value': 100, 'domain': {'latitude': (30.0, 90)}}, 'land_SHEX': {'value': 100, 'domain': {'latitude': (-90.0, -30)}}, 'land_TROPICS': {'value': 100, 'domain': {'latitude': (-30.0, 30)}}, 'land': {'value': 100}, 'ocean_NHEX': {'value': 0, 'domain': {'latitude': (30.0, 90)}}, 'ocean_SHEX': {'value': 0, 'domain': {'latitude': (-90.0, -30)}}, 'ocean_TROPICS': {'value': 0, 'domain': {'latitude': (30.0, 30)}}, 'ocean': {'value': 0}, 'CONUS': {'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'land_CONUS': {'value': 100, 'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'NAM': {'domain': {'latitude': (20.0, 90), 'longitude': (-180, 180)}}, 'NAO': {'domain': {'latitude': (20.0, 80), 'longitude': (-90, 40)}}, 'SAM': {'domain': {'latitude': (-20.0, -90), 'longitude': (0, 360)}}, 'PNA': {'domain': {'latitude': (20.0, 85), 'longitude': (120, 240)}}, 'PDO': {'domain': {'latitude': (20.0, 70), 'longitude': (110, 260)}}, 'AllMW': {'domain': {'latitude': (-40.0, 45.0), 'longitude': (0.0, 360.0)}}, 'AllM': {'domain': {'latitude': (-45.0, 45.0), 'longitude': (0.0, 360.0)}}, 'NAMM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (210.0, 310.0)}}, 'SAMM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (240.0, 330.0)}}, 'NAFM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (310.0, 60.0)}}, 'SAFM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (0.0, 90.0)}}, 'ASM': {'domain': {'latitude': (0.0, 45.0), 
'longitude': (60.0, 180.0)}}, 'AUSM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (90.0, 160.0)}}, 'AIR': {'domain': {'latitude': (7.0, 25.0), 'longitude': (65.0, 85.0)}}, 'AUS': {'domain': {'latitude': (-20.0, -10.0), 'longitude': (120.0, 150.0)}}, 'Sahel': {'domain': {'latitude': (13.0, 18.0), 'longitude': (-10.0, 10.0)}}, 'GoG': {'domain': {'latitude': (0.0, 5.0), 'longitude': (-10.0, 10.0)}}, 'NAmo': {'domain': {'latitude': (20.0, 37.0), 'longitude': (-112.0, -103.0)}}, 'SAmo': {'domain': {'latitude': (-20.0, 2.5), 'longitude': (-65.0, -40.0)}}} \n", + " regions: {'rlut': ['Global']} \n", + " test_data_path: demo_data/CMIP5_demo_clims/ \n", + " reference_data_path: demo_data/obs4MIPs_PCMDI_clims \n", + " metrics_output_path: demo_output/Ex2 \n", + " debug: False \n", + "\n", + "--- prepare mean climate metrics calculation ---\n", + "--- start mean climate metrics calculation ---\n", + "varname: rlut\n", + "level: None\n", + "reference_data_set (all): ['alternate1', 'default']\n", + "ref: alternate1\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-0/v20210804/rlut_mon_CERES-EBAF-4-0_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "ref: default\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-1/v20210804/rlut_mon_CERES-EBAF-4-1_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "pmp mean clim driver completed\n" ] } ], "source": [ "%%bash\n", - "mean_climate_driver.py -p basic_param.py --case_id 'Ex2' --regrid_tool 'esmf'" + "mean_climate_driver.py -p basic_param.py --case_id 'Ex2' --regrid_tool 'xesmf'" ] }, { @@ -1164,7 +1432,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 8, "metadata": { "scrolled": true }, @@ -1173,19 +1441,96 @@ "name": "stderr", "output_type": "stream", "text": [ - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: REGION: Global\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: alternate1 is an obs\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: default is an obs\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex3:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut_2.5x2.5_regrid2_linear_metrics.json\n" + "2023-02-21 14:22:54,655 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:23::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:23:18,559 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:23:20,370 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:23::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:23:44,270 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "INFO::2023-02-21 14:23::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut_2.5x2.5_regrid2_metrics.json\n", + "2023-02-21 14:23:44,273 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex3/rlut_2.5x2.5_regrid2_metrics.json\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "case_id: Ex3 \n", + " test_data_set: ['ACCESS1-0'] \n", + " realization: \n", + " vars: ['rlut'] \n", + " reference_data_set: ['all'] \n", + " target_grid: 2.5x2.5 \n", + " regrid_tool: regrid2 \n", + " regrid_tool_ocn: esmf \n", + " save_test_clims: False \n", + " test_clims_interpolated_output: None \n", + " filename_template: cmip5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20200426.nc \n", + " sftlf_filename_template: sftlf_%(model_version).nc \n", + " generate_sftlf: True \n", + " regions_specs: {'NHEX': {'domain': {'latitude': (30.0, 90)}}, 'SHEX': {'domain': {'latitude': (-90.0, -30)}}, 'TROPICS': {'domain': {'latitude': (-30.0, 30)}}, 'global': {}, '90S50S': {'domain': {'latitude': (-90.0, -50)}}, '50S20S': {'domain': {'latitude': (-50.0, -20)}}, '20S20N': {'domain': {'latitude': (-20.0, 20)}}, '20N50N': {'domain': {'latitude': (20.0, 50)}}, '50N90N': {'domain': {'latitude': (50.0, 90)}}, 'land_NHEX': {'value': 100, 'domain': {'latitude': (30.0, 90)}}, 'land_SHEX': {'value': 100, 'domain': {'latitude': (-90.0, -30)}}, 'land_TROPICS': {'value': 100, 'domain': {'latitude': (-30.0, 30)}}, 'land': {'value': 100}, 'ocean_NHEX': {'value': 0, 'domain': {'latitude': (30.0, 90)}}, 'ocean_SHEX': {'value': 0, 'domain': {'latitude': (-90.0, -30)}}, 'ocean_TROPICS': {'value': 0, 'domain': {'latitude': (30.0, 30)}}, 'ocean': {'value': 0}, 'CONUS': {'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'land_CONUS': {'value': 100, 'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'NAM': {'domain': {'latitude': (20.0, 90), 'longitude': (-180, 180)}}, 'NAO': {'domain': 
{'latitude': (20.0, 80), 'longitude': (-90, 40)}}, 'SAM': {'domain': {'latitude': (-20.0, -90), 'longitude': (0, 360)}}, 'PNA': {'domain': {'latitude': (20.0, 85), 'longitude': (120, 240)}}, 'PDO': {'domain': {'latitude': (20.0, 70), 'longitude': (110, 260)}}, 'AllMW': {'domain': {'latitude': (-40.0, 45.0), 'longitude': (0.0, 360.0)}}, 'AllM': {'domain': {'latitude': (-45.0, 45.0), 'longitude': (0.0, 360.0)}}, 'NAMM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (210.0, 310.0)}}, 'SAMM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (240.0, 330.0)}}, 'NAFM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (310.0, 60.0)}}, 'SAFM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (0.0, 90.0)}}, 'ASM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (60.0, 180.0)}}, 'AUSM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (90.0, 160.0)}}, 'AIR': {'domain': {'latitude': (7.0, 25.0), 'longitude': (65.0, 85.0)}}, 'AUS': {'domain': {'latitude': (-20.0, -10.0), 'longitude': (120.0, 150.0)}}, 'Sahel': {'domain': {'latitude': (13.0, 18.0), 'longitude': (-10.0, 10.0)}}, 'GoG': {'domain': {'latitude': (0.0, 5.0), 'longitude': (-10.0, 10.0)}}, 'NAmo': {'domain': {'latitude': (20.0, 37.0), 'longitude': (-112.0, -103.0)}}, 'SAmo': {'domain': {'latitude': (-20.0, 2.5), 'longitude': (-65.0, -40.0)}}} \n", + " regions: {'rlut': ['Global']} \n", + " test_data_path: demo_data/CMIP5_demo_clims/ \n", + " reference_data_path: demo_data/obs4MIPs_PCMDI_clims \n", + " metrics_output_path: demo_output/Ex3 \n", + " debug: False \n", + "\n", + "--- prepare mean climate metrics calculation ---\n", + "--- start mean climate metrics calculation ---\n", + "varname: rlut\n", + "level: None\n", + "reference_data_set (all): ['alternate1', 'default']\n", + "ref: alternate1\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-0/v20210804/rlut_mon_CERES-EBAF-4-0_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "ref: default\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-1/v20210804/rlut_mon_CERES-EBAF-4-1_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "pmp mean clim driver completed\n" ] } ], @@ -1199,53 +1544,175 @@ "metadata": {}, "source": [ "### Using custom regions \n", - "This example specifies additional regions for the analysis. The predefined regions that can be set by the `--regions` flag can be found in [default_regions.py](https://github.com/PCMDI/pcmdi_metrics/blob/master/share/default_regions.py). By default, the mean climate driver will run \"Global\", \"NHEX\", \"SHEX\", and \"Tropics\"." + "This example specifies additional regions for the analysis. The predefined regions that can be set by the `--regions` flag can be found in [default_regions.py](https://github.com/PCMDI/pcmdi_metrics/blob/master/share/default_regions.py). By default, the mean climate driver will run \"global\", \"NHEX\", \"SHEX\", and \"TROPICS\"." ] }, { "cell_type": "code", - "execution_count": 7, - "metadata": {}, + "execution_count": 9, + "metadata": { + "scrolled": true, + "tags": [] + }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex4:: REGION: land\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex4:: alternate1 is an obs\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex4:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex4:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex4:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex4:: Auto generating sftlf for model /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", - "INFO::2021-11-10 17:03::pcmdi_metrics::Ex4:: Auto generated sftlf for model ACCESS1-0\n", - "/opt/anaconda3/envs/pcmdi_metrics_dev/lib/python3.9/site-packages/numpy/ma/core.py:1015: RuntimeWarning: overflow encountered in multiply\n", - " result = self.f(da, db, *args, **kwargs)\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: CanCM4 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Auto generating sftlf 
for model /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Auto generated sftlf for model CanCM4\n", - "/opt/anaconda3/envs/pcmdi_metrics_dev/lib/python3.9/site-packages/numpy/ma/core.py:1015: RuntimeWarning: overflow encountered in multiply\n", - " result = self.f(da, db, *args, **kwargs)\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: default is an obs\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: CanCM4 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex4:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_linear_metrics.json\n" + "2023-02-21 14:23:50,755 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:24::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:24:14,525 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:24:14,547 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:24::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:24:35,676 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:24:37,325 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:25::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:25:02,327 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:25:02,348 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:25::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:25:26,158 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "INFO::2023-02-21 14:25::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_metrics.json\n", + "2023-02-21 14:25:26,171 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex4/rlut_2.5x2.5_regrid2_metrics.json\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "case_id: Ex4 \n", + " test_data_set: ['ACCESS1-0', 'CanCM4'] \n", + " realization: \n", + " vars: ['rlut'] \n", + " reference_data_set: ['all'] \n", + " target_grid: 2.5x2.5 \n", + " regrid_tool: regrid2 \n", + " regrid_tool_ocn: esmf \n", + " save_test_clims: False \n", + " test_clims_interpolated_output: None \n", + " filename_template: cmip5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20200426.nc \n", + " sftlf_filename_template: sftlf_%(model_version).nc \n", + " generate_sftlf: True \n", + " regions_specs: {'MyDomain': {'domain': {'latitude': (20.0, 30)}}} \n", + " regions: {'rlut': ['MyDomain']} \n", + " test_data_path: demo_data/CMIP5_demo_clims/ \n", + " reference_data_path: demo_data/obs4MIPs_PCMDI_clims \n", + " metrics_output_path: demo_output/Ex4 \n", + " debug: False \n", + "\n", + "--- prepare mean climate metrics calculation ---\n", + "--- start mean climate 
metrics calculation ---\n", + "varname: rlut\n", + "level: None\n", + "reference_data_set (all): ['alternate1', 'default']\n", + "ref: alternate1\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-0/v20210804/rlut_mon_CERES-EBAF-4-0_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: MyDomain\n", + "spatial subset done\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: MyDomain\n", + "spatial subset done\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "ref: default\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-1/v20210804/rlut_mon_CERES-EBAF-4-1_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: MyDomain\n", + "spatial subset done\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: MyDomain\n", + "spatial subset done\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "pmp mean clim driver completed\n" ] } ], "source": [ "%%bash\n", - "mean_climate_driver.py -p basic_param.py --case_id 'Ex4' --regions '{\"rlut\": [\"land\"]}'" + "mean_climate_driver.py -p basic_param.py \\\n", + "--case_id 'Ex4' --regions '{\"rlut\": [\"MyDomain\"]}' \\\n", + "--regions_specs \"{'MyDomain': {'domain': {'latitude': (20.0, 30)}}}\"" ] }, { @@ -1259,7 +1726,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "It is not currently possible to edit the region definitions from the command line. This is controlled by the variable `regions_specs` in the parameter file. For example, a custom region for Antarctica could be defined with `regions_specs = {'ANT': {'value': 100, 'domain': cdutil.region.domain(latitude=(-60, -90))}}` in the parameter file. \n", + "Custom domain is controlled by the variable `regions_specs` in the parameter file or command line. For example, a custom region for Antarctica over land could be defined with `regions_specs = {'ANT': {'value': 100, 'domain': {'latitude': (-90, -60)}}}` in the parameter file. `value` indicate masking, 100: land only, 0: ocean only.\n", "\n", "The command to use the custom region would look like `--regions '{\"rlut\": [\"ANT\"]}'` in the command line or `regions = {\"rlut\": [\"ANT\"]}` in the parameter file." 
] @@ -1286,52 +1753,141 @@ }, { "cell_type": "code", - "execution_count": 8, - "metadata": {}, + "execution_count": 10, + "metadata": { + "scrolled": true, + "tags": [] + }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: REGION: global\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: alternate1 is an obs\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: CanCM4 is a model\n", - "/opt/anaconda3/envs/pcmdi_metrics_dev/lib/python3.9/site-packages/cdms2/fvariable.py:103: FutureWarning: Using a non-tuple sequence for multidimensional indexing is deprecated; use `arr[tuple(seq)]` instead of `arr[seq]`. In the future this will be interpreted as an array index, `arr[np.array(seq)]`, which will result either in an error or a different result.\n", - " result = result[revlist]\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg-500_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg-500_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: REGION: NHEX\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: alternate1 is an obs\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: CanCM4 is a model\n", - "/opt/anaconda3/envs/pcmdi_metrics_dev/lib/python3.9/site-packages/cdms2/fvariable.py:103: FutureWarning: Using a non-tuple sequence for multidimensional indexing is deprecated; use `arr[tuple(seq)]` instead of `arr[seq]`. 
In the future this will be interpreted as an array index, `arr[np.array(seq)]`, which will result either in an error or a different result.\n", - " result = result[revlist]\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg-500_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg-500_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: REGION: SHEX\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: alternate1 is an obs\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: CanCM4 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg-500_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg-500_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: REGION: TROPICS\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: alternate1 is an obs\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: CanCM4 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg-500_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex6:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/zg-500_2.5x2.5_regrid2_linear_metrics.json\n" + "2023-02-21 14:25:33,713 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:27::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/zg_500/CanCM4_zg_500_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:27:19,315 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/zg_500/CanCM4_zg_500_2.5x2.5_regrid2_metrics_alternate1.json\n", + "INFO::2023-02-21 14:27::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/zg_500_2.5x2.5_regrid2_metrics.json\n", + "2023-02-21 14:27:29,314 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/zg_500_2.5x2.5_regrid2_metrics.json\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "case_id: Ex5 \n", + " test_data_set: ['CanCM4'] \n", + " realization: \n", + " vars: ['zg_500'] \n", + " reference_data_set: ['alternate1'] \n", + " target_grid: 2.5x2.5 \n", + " regrid_tool: regrid2 \n", + " regrid_tool_ocn: esmf \n", + " save_test_clims: False \n", + " test_clims_interpolated_output: None \n", + " filename_template: cmip5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20200426.nc \n", + " sftlf_filename_template: sftlf_%(model_version).nc \n", + " generate_sftlf: True \n", + " regions_specs: {'NHEX': {'domain': {'latitude': (30.0, 90)}}, 'SHEX': {'domain': {'latitude': (-90.0, -30)}}, 'TROPICS': {'domain': {'latitude': (-30.0, 30)}}, 'global': {}, '90S50S': {'domain': {'latitude': (-90.0, -50)}}, '50S20S': {'domain': {'latitude': (-50.0, -20)}}, '20S20N': {'domain': {'latitude': (-20.0, 20)}}, '20N50N': {'domain': {'latitude': (20.0, 50)}}, '50N90N': {'domain': {'latitude': (50.0, 90)}}, 'land_NHEX': {'value': 100, 'domain': {'latitude': (30.0, 90)}}, 'land_SHEX': {'value': 100, 'domain': {'latitude': (-90.0, -30)}}, 'land_TROPICS': {'value': 100, 'domain': {'latitude': (-30.0, 30)}}, 'land': {'value': 100}, 'ocean_NHEX': {'value': 0, 'domain': {'latitude': (30.0, 90)}}, 'ocean_SHEX': {'value': 0, 'domain': {'latitude': (-90.0, -30)}}, 'ocean_TROPICS': {'value': 0, 'domain': {'latitude': (30.0, 30)}}, 'ocean': {'value': 0}, 'CONUS': {'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'land_CONUS': {'value': 100, 'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'NAM': {'domain': {'latitude': (20.0, 90), 'longitude': (-180, 180)}}, 'NAO': {'domain': {'latitude': (20.0, 80), 'longitude': (-90, 40)}}, 'SAM': {'domain': {'latitude': (-20.0, -90), 'longitude': (0, 360)}}, 'PNA': {'domain': {'latitude': (20.0, 85), 'longitude': (120, 240)}}, 'PDO': {'domain': {'latitude': (20.0, 70), 'longitude': (110, 260)}}, 'AllMW': {'domain': {'latitude': (-40.0, 45.0), 'longitude': (0.0, 360.0)}}, 'AllM': {'domain': {'latitude': (-45.0, 45.0), 'longitude': (0.0, 360.0)}}, 'NAMM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (210.0, 310.0)}}, 'SAMM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (240.0, 330.0)}}, 'NAFM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (310.0, 60.0)}}, 'SAFM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (0.0, 90.0)}}, 'ASM': {'domain': 
{'latitude': (0.0, 45.0), 'longitude': (60.0, 180.0)}}, 'AUSM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (90.0, 160.0)}}, 'AIR': {'domain': {'latitude': (7.0, 25.0), 'longitude': (65.0, 85.0)}}, 'AUS': {'domain': {'latitude': (-20.0, -10.0), 'longitude': (120.0, 150.0)}}, 'Sahel': {'domain': {'latitude': (13.0, 18.0), 'longitude': (-10.0, 10.0)}}, 'GoG': {'domain': {'latitude': (0.0, 5.0), 'longitude': (-10.0, 10.0)}}, 'NAmo': {'domain': {'latitude': (20.0, 37.0), 'longitude': (-112.0, -103.0)}}, 'SAmo': {'domain': {'latitude': (-20.0, 2.5), 'longitude': (-65.0, -40.0)}}} \n", + " regions: {'rlut': ['Global']} \n", + " test_data_path: demo_data/CMIP5_demo_clims/ \n", + " reference_data_path: demo_data/obs4MIPs_PCMDI_clims \n", + " metrics_output_path: demo_output/Ex5 \n", + " debug: False \n", + "\n", + "--- prepare mean climate metrics calculation ---\n", + "--- start mean climate metrics calculation ---\n", + "varname: zg\n", + "level: 500.0\n", + "ref: alternate1\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/zg/ERA-INT/v20210804/zg_mon_ERA-INT_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.zg.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: global\n", + "compute metrics start\n", + "var: zg\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "region: NHEX\n", + "spatial subset done\n", + "compute metrics start\n", + "var: zg\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "region: SHEX\n", + "spatial subset done\n", + "compute metrics start\n", + "var: zg\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", 
+ "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "region: TROPICS\n", + "spatial subset done\n", + "compute metrics start\n", + "var: zg\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "pmp mean clim driver completed\n" ] } ], "source": [ "%%bash\n", "mean_climate_driver.py -p basic_param.py \\\n", - "--case_id 'Ex6' \\\n", + "--case_id 'Ex5' \\\n", "--vars 'zg_500' \\\n", "--test_data_set 'CanCM4' \\\n", "--reference_data_set \"alternate1\"" @@ -1354,7 +1910,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 11, "metadata": { "scrolled": true }, @@ -1363,37 +1919,155 @@ "name": "stderr", "output_type": "stream", "text": [ - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: REGION: Global\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: alternate1 is an obs\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Saving interpolated climatologies to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/Global\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Results saved to a nc file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/Global.nc\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: CanCM4 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Saving interpolated climatologies to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/Global\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Results saved to a nc file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/Global.nc\n", - "INFO::2021-11-10 
17:04::pcmdi_metrics::Ex5:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: default is an obs\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Could not figure out obs mask name from obs json file\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: TEST DATA IS: ACCESS1-0\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: ACCESS1-0 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_linear_metrics.json\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: TEST DATA IS: CanCM4\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: CanCM4 is a model\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Saving results to: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_linear_metrics\n", - "INFO::2021-11-10 17:04::pcmdi_metrics::Ex5:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20211109/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex5/rlut_2.5x2.5_regrid2_linear_metrics.json\n" + "2023-02-21 14:27:36,115 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:28::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:28:06,336 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:28:06,360 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:28::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:28:37,819 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_alternate1.json\n", + "2023-02-21 14:28:39,744 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. 
Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:29::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:29:13,750 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut/ACCESS1-0_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:29:13,771 [WARNING]: dataset.py(_is_decodable:474) >> 'time' does not have a 'units' attribute set so it could not be decoded. Try setting the 'units' attribute (`ds.{coords.name}.attrs['units']`) and try decoding again.\n", + "INFO::2023-02-21 14:29::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "2023-02-21 14:29:47,125 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut/CanCM4_rlut_2.5x2.5_regrid2_metrics_default.json\n", + "INFO::2023-02-21 14:29::pcmdi_metrics:: Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut_2.5x2.5_regrid2_metrics.json\n", + "2023-02-21 14:29:57,273 [INFO]: base.py(write:245) >> Results saved to a json file: /Users/lee1043/Documents/Research/git/pcmdi_metrics_20221013_pcmdi/pcmdi_metrics/doc/jupyter/Demo/demo_output/Ex6/rlut_2.5x2.5_regrid2_metrics.json\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "case_id: Ex6 \n", + " test_data_set: ['ACCESS1-0', 'CanCM4'] \n", + " realization: \n", + " vars: ['rlut'] \n", + " reference_data_set: ['all'] \n", + " target_grid: 2.5x2.5 \n", + " regrid_tool: regrid2 \n", + " regrid_tool_ocn: esmf \n", + " save_test_clims: True \n", + " test_clims_interpolated_output: None \n", + " filename_template: cmip5.historical.%(model_version).r1i1p1.mon.%(variable).198101-200512.AC.v20200426.nc \n", + " sftlf_filename_template: sftlf_%(model_version).nc \n", + " generate_sftlf: True \n", + " regions_specs: {'NHEX': {'domain': {'latitude': (30.0, 90)}}, 'SHEX': {'domain': {'latitude': (-90.0, -30)}}, 'TROPICS': {'domain': {'latitude': (-30.0, 30)}}, 'global': {}, '90S50S': {'domain': {'latitude': (-90.0, -50)}}, '50S20S': {'domain': {'latitude': (-50.0, -20)}}, '20S20N': {'domain': {'latitude': (-20.0, 20)}}, '20N50N': {'domain': {'latitude': (20.0, 50)}}, '50N90N': {'domain': {'latitude': (50.0, 90)}}, 'land_NHEX': {'value': 100, 'domain': {'latitude': (30.0, 90)}}, 'land_SHEX': {'value': 100, 'domain': {'latitude': (-90.0, -30)}}, 'land_TROPICS': {'value': 100, 'domain': {'latitude': (-30.0, 30)}}, 'land': {'value': 100}, 'ocean_NHEX': {'value': 0, 'domain': {'latitude': (30.0, 90)}}, 'ocean_SHEX': {'value': 0, 'domain': {'latitude': (-90.0, -30)}}, 'ocean_TROPICS': {'value': 0, 'domain': {'latitude': (30.0, 30)}}, 'ocean': {'value': 0}, 'CONUS': {'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'land_CONUS': {'value': 100, 'domain': {'latitude': (24.7, 49.4), 'longitude': (-124.78, -66.92)}}, 'NAM': {'domain': {'latitude': (20.0, 90), 'longitude': (-180, 180)}}, 'NAO': {'domain': 
{'latitude': (20.0, 80), 'longitude': (-90, 40)}}, 'SAM': {'domain': {'latitude': (-20.0, -90), 'longitude': (0, 360)}}, 'PNA': {'domain': {'latitude': (20.0, 85), 'longitude': (120, 240)}}, 'PDO': {'domain': {'latitude': (20.0, 70), 'longitude': (110, 260)}}, 'AllMW': {'domain': {'latitude': (-40.0, 45.0), 'longitude': (0.0, 360.0)}}, 'AllM': {'domain': {'latitude': (-45.0, 45.0), 'longitude': (0.0, 360.0)}}, 'NAMM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (210.0, 310.0)}}, 'SAMM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (240.0, 330.0)}}, 'NAFM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (310.0, 60.0)}}, 'SAFM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (0.0, 90.0)}}, 'ASM': {'domain': {'latitude': (0.0, 45.0), 'longitude': (60.0, 180.0)}}, 'AUSM': {'domain': {'latitude': (-45.0, 0.0), 'longitude': (90.0, 160.0)}}, 'AIR': {'domain': {'latitude': (7.0, 25.0), 'longitude': (65.0, 85.0)}}, 'AUS': {'domain': {'latitude': (-20.0, -10.0), 'longitude': (120.0, 150.0)}}, 'Sahel': {'domain': {'latitude': (13.0, 18.0), 'longitude': (-10.0, 10.0)}}, 'GoG': {'domain': {'latitude': (0.0, 5.0), 'longitude': (-10.0, 10.0)}}, 'NAmo': {'domain': {'latitude': (20.0, 37.0), 'longitude': (-112.0, -103.0)}}, 'SAmo': {'domain': {'latitude': (-20.0, 2.5), 'longitude': (-65.0, -40.0)}}} \n", + " regions: {'rlut': ['Global']} \n", + " test_data_path: demo_data/CMIP5_demo_clims/ \n", + " reference_data_path: demo_data/obs4MIPs_PCMDI_clims \n", + " metrics_output_path: demo_output/Ex6 \n", + " debug: False \n", + "\n", + "--- prepare mean climate metrics calculation ---\n", + "--- start mean climate metrics calculation ---\n", + "varname: rlut\n", + "level: None\n", + "reference_data_set (all): ['alternate1', 'default']\n", + "ref: alternate1\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-0/v20210804/rlut_mon_CERES-EBAF-4-0_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "ref: default\n", + "ref_data_full_path: demo_data/obs4MIPs_PCMDI_clims/rlut/CERES-EBAF-4-1/v20210804/rlut_mon_CERES-EBAF-4-1_PCMDI_gn.200301-201812.AC.v20210804.nc\n", + "-----------------------\n", + "model, run: ACCESS1-0 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.ACCESS1-0.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "-----------------------\n", + "model, run: CanCM4 \n", + "test_data (model in this case) full_path: demo_data/CMIP5_demo_clims/cmip5.historical.CanCM4.r1i1p1.mon.rlut.198101-200512.AC.v20200426.nc\n", + "[WARNING]: calendar info mismatch. 
ds.time.attrs[\"calendar\"] is adjusted to ds.calendar\n", + "load and regrid done\n", + "region: Global\n", + "compute metrics start\n", + "var: rlut\n", + "compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD\n", + "compute_metrics, rms_xyt\n", + "compute_metrics, stdObs_xyt\n", + "compute_metrics, std_xyt\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL MEAN BIAS\n", + "compute_metrics-CALCULATE MSE\n", + "compute_metrics-CALCULATE MEAN RMS\n", + "compute_metrics-CALCULATE MEAN CORR\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD STD\n", + "compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN\n", + "compute_metrics-CALCULATE ANNUAL MEANS\n", + "compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS\n", + "compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD\n", + "pmp mean clim driver completed\n" ] } ], "source": [ "%%bash \n", - "mean_climate_driver.py -p basic_param.py --case_id 'Ex5' --user_notes 'Example note' --save_test_clims True" + "mean_climate_driver.py -p basic_param.py \\\n", + "--case_id 'Ex6' --user_notes 'Example note' --save_test_clims True" ] }, { @@ -1411,9 +2085,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python [conda env:pmp_devel_20230218] *", "language": "python", - "name": "python3" + "name": "conda-env-pmp_devel_20230218-py" }, "language_info": { "codemirror_mode": { diff --git a/pcmdi_metrics/__init__.py b/pcmdi_metrics/__init__.py index 92933f209..c9533b8d3 100644 --- a/pcmdi_metrics/__init__.py +++ b/pcmdi_metrics/__init__.py @@ -15,5 +15,6 @@ plog.addHandler(ch) plog.setLevel(LOG_LEVEL) from . import io # noqa -from . import pcmdi # noqa +#from . import pcmdi # noqa +#from . import mean_climate # noqa from .version import __git_sha1__, __git_tag_describe__, __version__ # noqa diff --git a/pcmdi_metrics/io/__init__.py b/pcmdi_metrics/io/__init__.py index d52b46a38..9822a2637 100644 --- a/pcmdi_metrics/io/__init__.py +++ b/pcmdi_metrics/io/__init__.py @@ -1,3 +1,6 @@ # init for pcmdi_metrics.io +from .xcdat_openxml import xcdat_open # noqa # isort:skip from . 
import base # noqa from .base import MV2Json # noqa +from .default_regions_define import load_regions_specs # noqa +from .default_regions_define import region_subset # noqa diff --git a/pcmdi_metrics/io/base.py b/pcmdi_metrics/io/base.py index feb4cecee..071602cd7 100755 --- a/pcmdi_metrics/io/base.py +++ b/pcmdi_metrics/io/base.py @@ -14,9 +14,11 @@ import genutil import MV2 import numpy +import xcdat import pcmdi_metrics from pcmdi_metrics import LOG_LEVEL +from pcmdi_metrics.io import xcdat_open value = 0 cdms2.setNetcdfShuffleFlag(value) # where value is either 0 or 1 @@ -80,7 +82,7 @@ def update_dict(d, u): def generateProvenance(): - extra_pairs = {"matplotlib": "matplotlib ", "scipy": "scipy"} + extra_pairs = {"matplotlib": "matplotlib ", "scipy": "scipy", "xcdat": "xcdat", "xarray": "xarray"} prov = cdat_info.generateProvenance(extra_pairs=extra_pairs) prov["packages"]["PMP"] = pcmdi_metrics.version.__git_tag_describe__ prov["packages"][ @@ -164,6 +166,7 @@ def write( type="json", mode="w", include_YAML=False, + include_history=False, include_script=False, *args, **kwargs, @@ -174,7 +177,7 @@ def write( if not os.path.exists(dir_path): try: - os.makedirs(dir_path) + os.makedirs(dir_path, exist_ok=True) except Exception: logging.getLogger("pcmdi_metrics").error( "Could not create output directory: %s" % dir_path @@ -213,7 +216,9 @@ def write( if not include_script: if "script" in out_dict["provenance"].keys(): del out_dict["provenance"]["script"] - + if not include_history: + if "history" in out_dict["provenance"].keys(): + del out_dict["provenance"]["history"] json.dump(out_dict, f, cls=CDMSDomainsEncoder, *args, **kwargs) f.close() @@ -224,11 +229,14 @@ def write( f.close() elif self.type == "nc": + """ f = cdms2.open(file_name, "w") f.write(data, *args, **kwargs) f.metrics_git_sha1 = pcmdi_metrics.__git_sha1__ f.uvcdat_version = cdat_info.get_version() f.close() + """ + data.to_netcdf(file_name) else: logging.getLogger("pcmdi_metrics").error("Unknown type: %s" % type) @@ -352,7 +360,7 @@ def get(self, var, var_in_file=None, region={}, *args, **kwargs): if self.is_masking(): self.var_from_file = self.mask_var(self.var_from_file) - self.var_from_file = self.set_target_grid_and_mask_in_var(self.var_from_file) + self.var_from_file = self.set_target_grid_and_mask_in_var(self.var_from_file, var) self.var_from_file = self.set_domain_in_var(self.var_from_file, self.region) @@ -361,13 +369,18 @@ def get(self, var, var_in_file=None, region={}, *args, **kwargs): def extract_var_from_file(self, var, var_in_file, *args, **kwargs): if var_in_file is None: var_in_file = var - # self.extension = 'nc' - var_file = cdms2.open(self(), "r") - for att in ["var_in_file,", "varInFile"]: - if att in kwargs: - del kwargs[att] - extracted_var = var_file(var_in_file, *args, **kwargs) - var_file.close() + + try: + ds = xcdat_open(self(), data_var=var_in_file, decode_times=True) + except Exception: + ds = xcdat_open(self(), data_var=var_in_file, decode_times=False) # Temporary part to read in cdms written obs4MIP AC files + + if 'level' in list(kwargs.keys()): + level = kwargs['level'] + ds = ds.sel(plev=level) + + extracted_var = ds + return extracted_var def is_masking(self): @@ -377,46 +390,51 @@ def is_masking(self): return False def mask_var(self, var): + """ + self: + var: + """ + var_shape = tuple(var.dims[d] for d in ['lat', 'lon']) + if self.mask is None: self.set_file_mask_template() self.mask = self.get_mask_from_var(var) - if self.mask.shape != var.shape: + # if self.mask.shape != var.shape: + if 
self.mask.shape != var_shape: dummy, mask = genutil.grower(var, self.mask) else: mask = self.target_mask mask = MV2.not_equal(mask, self.value) return MV2.masked_where(mask, var) - def set_target_grid_and_mask_in_var(self, var): + def set_target_grid_and_mask_in_var(self, var, var_in_file): + """ + self: object + self(): string, path to input file + """ if self.target_grid is not None: - var = var.regrid( - self.target_grid, - regridTool=self.regrid_tool, - regridMethod=self.regrid_method, - coordSys="deg", - diag={}, - periodicity=1, - ) - + var = var.regridder.horizontal(var_in_file, self.target_grid, tool=self.regrid_tool) if self.target_mask is not None: - if self.target_mask.shape != var.shape: + # if self.target_mask.shape != var.shape: + if self.target_mask.shape != var[var_in_file].shape: dummy, mask = genutil.grower(var, self.target_mask) else: mask = self.target_mask var = MV2.masked_where(mask, var) - return var def set_domain_in_var(self, var, region): - domain = region.get("domain", None) - if domain is not None: - if isinstance(domain, dict): - var = var(**domain) - elif isinstance(domain, (list, tuple)): - var = var(*domain) - elif isinstance(domain, cdms2.selectors.Selector): - domain.id = region.get("id", "region") - var = var(*[domain]) + """ + self: + var: + region: , e.g., {'domain': Selector(), 'id': 'NHEX'} + """ + region_id = region['id'] + from pcmdi_metrics.io import load_regions_specs, region_subset + regions_specs = load_regions_specs() + if region_id not in ['global', 'land', 'ocean']: + var = region_subset(var, regions_specs, region=region_id) + return var def set_file_mask_template(self): @@ -429,7 +447,8 @@ def set_file_mask_template(self): def get_mask_from_var(self, var): try: - o_mask = self.file_mask_template.get("sftlf") + # o_mask = self.file_mask_template.get("sftlf") + o_mask = self.file_mask_template.get("sftlf", var_in_file="sftlf") except Exception: o_mask = ( cdutil.generateLandSeaMask(var, regridTool=self.regrid_tool).filled(1.0) @@ -444,7 +463,8 @@ def set_target_grid(self, target, regrid_tool="esmf", regrid_method="linear"): self.regrid_tool = regrid_tool self.regrid_method = regrid_method if target == "2.5x2.5": - self.target_grid = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) + # self.target_grid = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) + self.target_grid = xcdat.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) self.target_grid_name = target elif cdms2.isGrid(target): self.target_grid = target diff --git a/pcmdi_metrics/io/default_regions_define.py b/pcmdi_metrics/io/default_regions_define.py new file mode 100755 index 000000000..4511e2c43 --- /dev/null +++ b/pcmdi_metrics/io/default_regions_define.py @@ -0,0 +1,112 @@ +import xcdat as xc + + +def load_regions_specs(): + + regions_specs = { + # Mean Climate + "NHEX": {"domain": {"latitude": (30.0, 90)}}, + "SHEX": {"domain": {"latitude": (-90.0, -30)}}, + "TROPICS": {"domain": {"latitude": (-30.0, 30)}}, + "global": {}, + "90S50S": {"domain": {"latitude": (-90.0, -50)}}, + "50S20S": {"domain": {"latitude": (-50.0, -20)}}, + "20S20N": {"domain": {"latitude": (-20.0, 20)}}, + "20N50N": {"domain": {"latitude": (20.0, 50)}}, + "50N90N": {"domain": {"latitude": (50.0, 90)}}, + "land_NHEX": {"value": 100, "domain": {"latitude": (30.0, 90)}}, + "land_SHEX": {"value": 100, "domain": {"latitude": (-90.0, -30)}}, + "land_TROPICS": {"value": 100, "domain": {"latitude": (-30.0, 30)}}, + "land": {"value": 100}, + "ocean_NHEX": {"value": 0, "domain": {"latitude": (30.0, 
90)}}, + "ocean_SHEX": {"value": 0, "domain": {"latitude": (-90.0, -30)}}, + "ocean_TROPICS": {"value": 0, "domain": {"latitude": (30.0, 30)}}, + "ocean": {"value": 0}, + "CONUS": {"domain": {"latitude": (24.7, 49.4), "longitude": (-124.78, -66.92)}}, + "land_CONUS": {"value": 100, "domain": {"latitude": (24.7, 49.4), "longitude": (-124.78, -66.92)}}, + # Modes of variability + "NAM": {"domain": {"latitude": (20.0, 90), "longitude": (-180, 180)}}, + "NAO": {"domain": {"latitude": (20.0, 80), "longitude": (-90, 40)}}, + "SAM": {"domain": {"latitude": (-20.0, -90), "longitude": (0, 360)}}, + "PNA": {"domain": {"latitude": (20.0, 85), "longitude": (120, 240)}}, + "PDO": {"domain": {"latitude": (20.0, 70), "longitude": (110, 260)}}, + # Monsoon domains for Wang metrics + # All monsoon domains + "AllMW": {"domain": {"latitude": (-40.0, 45.0), "longitude": (0.0, 360.0)}}, + "AllM": {"domain": {"latitude": (-45.0, 45.0), "longitude": (0.0, 360.0)}}, + # North American Monsoon + "NAMM": {"domain": {"latitude": (0.0, 45.0), "longitude": (210.0, 310.0)}}, + # South American Monsoon + "SAMM": {"domain": {"latitude": (-45.0, 0.0), "longitude": (240.0, 330.0)}}, + # North African Monsoon + "NAFM": {"domain": {"latitude": (0.0, 45.0), "longitude": (310.0, 60.0)}}, + # South African Monsoon + "SAFM": {"domain": {"latitude": (-45.0, 0.0), "longitude": (0.0, 90.0)}}, + # Asian Summer Monsoon + "ASM": {"domain": {"latitude": (0.0, 45.0), "longitude": (60.0, 180.0)}}, + # Australian Monsoon + "AUSM": {"domain": {"latitude": (-45.0, 0.0), "longitude": (90.0, 160.0)}}, + # Monsoon domains for Sperber metrics + # All India rainfall + "AIR": {"domain": {"latitude": (7.0, 25.0), "longitude": (65.0, 85.0)}}, + # North Australian + "AUS": {"domain": {"latitude": (-20.0, -10.0), "longitude": (120.0, 150.0)}}, + # Sahel + "Sahel": {"domain": {"latitude": (13.0, 18.0), "longitude": (-10.0, 10.0)}}, + # Gulf of Guinea + "GoG": {"domain": {"latitude": (0.0, 5.0), "longitude": (-10.0, 10.0)}}, + # North American monsoon + "NAmo": {"domain": {"latitude": (20.0, 37.0), "longitude": (-112.0, -103.0)}}, + # South American monsoon + "SAmo": {"domain": {"latitude": (-20.0, 2.5), "longitude": (-65.0, -40.0)}}, + } + + return regions_specs + + +def region_subset(ds, regions_specs, region=None): + """ + d: xarray.Dataset + regions_specs: dict + region: string + """ + + if ((region is None) or ((region is not None) and (region not in list(regions_specs.keys())))): + print('Error: region not defined') + else: + if 'domain' in list(regions_specs[region].keys()): + if 'latitude' in list(regions_specs[region]['domain'].keys()): + lat0 = regions_specs[region]['domain']['latitude'][0] + lat1 = regions_specs[region]['domain']['latitude'][1] + # proceed subset + if 'latitude' in (ds.coords.dims): + ds = ds.sel(latitude=slice(lat0, lat1)) + elif 'lat' in (ds.coords.dims): + ds = ds.sel(lat=slice(lat0, lat1)) + + if 'longitude' in list(regions_specs[region]['domain'].keys()): + lon0 = regions_specs[region]['domain']['longitude'][0] + lon1 = regions_specs[region]['domain']['longitude'][1] + + # check original dataset longitude range + if 'longitude' in (ds.coords.dims): + lon_min = ds.longitude.min() + lon_max = ds.longitude.max() + elif 'lon' in (ds.coords.dims): + lon_min = ds.lon.min() + lon_max = ds.lon.max() + + # longitude range swap if needed + if min(lon0, lon1) < 0: # when subset region lon is defined in (-180, 180) range + if min(lon_min, lon_max) < 0: # if original data lon range is (-180, 180) no treatment needed + pass + else: # 
if original data lon range is (0, 360), convert swap lon + ds = xc.swap_lon_axis(ds, to=(-180, 180)) + + # proceed subset + if 'longitude' in (ds.coords.dims): + ds = ds.sel(longitude=slice(lon0, lon1)) + elif 'lon' in (ds.coords.dims): + ds = ds.sel(lon=slice(lon0, lon1)) + + return ds diff --git a/pcmdi_metrics/io/xcdat_openxml.py b/pcmdi_metrics/io/xcdat_openxml.py new file mode 100644 index 000000000..0135481c0 --- /dev/null +++ b/pcmdi_metrics/io/xcdat_openxml.py @@ -0,0 +1,62 @@ +import glob +import os +import sys + +import xcdat +import xmltodict + + +def xcdat_open(infile, data_var=None, decode_times=True): + """ + Parameter + --------- + infile: + list of string, or string + File(s) to open using xcdat + data_var: + (Optional[str], optional) – The key of the non-bounds data variable to keep in the Dataset, alongside any existing bounds data variables, by default None. + + Output + ------ + ds: + xcdat dataset + """ + if isinstance(infile, list): + ds = xcdat.open_mfdataset(infile, data_var=data_var, decode_times=decode_times) + else: + if infile.split('.')[-1].lower() == 'xml': + ds = xcdat_openxml(infile, data_var=data_var, decode_times=decode_times) + else: + ds = xcdat.open_dataset(infile, data_var=data_var, decode_times=decode_times) + + return ds + + +def xcdat_openxml(xmlfile, data_var=None, decode_times=True): + """ + Parameter + --------- + infile: + xml file to open using xcdat + data_var: + (Optional[str], optional) – The key of the non-bounds data variable to keep in the Dataset, alongside any existing bounds data variables, by default None. + + Output + ------ + ds: + xcdat dataset + """ + if not os.path.exists(xmlfile): + sys.exit('ERROR: File not exist: {}'.format(xmlfile)) + + with open(xmlfile) as fd: + doc = xmltodict.parse(fd.read()) + + ncfile_list = glob.glob(os.path.join(doc['dataset']['@directory'], '*.nc')) + + if len(ncfile_list) > 1: + ds = xcdat.open_mfdataset(ncfile_list, data_var=data_var, decode_times=decode_times) + else: + ds = xcdat.open_dataset(ncfile_list[0], data_var=data_var, decode_times=decode_times) + + return ds diff --git a/pcmdi_metrics/mean_climate/README.md b/pcmdi_metrics/mean_climate/README.md new file mode 100644 index 000000000..f2d1dc4d1 --- /dev/null +++ b/pcmdi_metrics/mean_climate/README.md @@ -0,0 +1,13 @@ +# PMP Mean Climate Metrics + +## STEP 1. Calculate annual cycle and seasonal mean, and archive + +Example usage: + +```pcmdi_compute_climatologies.py -p param/basic_annual_cycle_param.py``` + +## STEP 2. Compute metrics + +Example usage: + +```mean_climate_driver.py -p param/basic_param.py``` diff --git a/pcmdi_metrics/driver/__init__.py b/pcmdi_metrics/mean_climate/__init__.py similarity index 100% rename from pcmdi_metrics/driver/__init__.py rename to pcmdi_metrics/mean_climate/__init__.py diff --git a/pcmdi_metrics/mean_climate/deprecated/lib/__init__.py b/pcmdi_metrics/mean_climate/deprecated/lib/__init__.py new file mode 100644 index 000000000..5d395052c --- /dev/null +++ b/pcmdi_metrics/mean_climate/deprecated/lib/__init__.py @@ -0,0 +1,25 @@ +from .compute_statistics import ( # noqa + annual_mean, + bias_xy, + cor_xy, + mean_xy, + meanabs_xy, + rms_0, + rms_xy, + rms_xyt, + rmsc_xy, + seasonal_mean, + std_xy, + std_xyt, + zonal_mean, +) +from .mean_climate_metrics_calculations import compute_metrics # noqa +from .mean_climate_metrics_driver import PMPDriver, create_mean_climate_parser # noqa + +from . import dataset # DataSet # noqa # isort:skip +from . import io # noqa # isort:skip +from . 
import model # Model # noqa # isort:skip +from . import observation # OBS, Observation # noqa # isort:skip +from . import outputmetrics # OutputMetrics # noqa # isort:skip +from . import pmp_parameter # PMPParameter, PMPMetricsParameter # noqa # isort:skip +from . import pmp_parser # PMPParser, PMPMetricsParser # noqa # isort:skip diff --git a/pcmdi_metrics/driver/dataset.py b/pcmdi_metrics/mean_climate/deprecated/lib/dataset.py similarity index 82% rename from pcmdi_metrics/driver/dataset.py rename to pcmdi_metrics/mean_climate/deprecated/lib/dataset.py index d137806fc..42090a48f 100644 --- a/pcmdi_metrics/driver/dataset.py +++ b/pcmdi_metrics/mean_climate/deprecated/lib/dataset.py @@ -5,6 +5,7 @@ import cdms2 import cdutil +import xcdat from six import with_metaclass from pcmdi_metrics import resources @@ -72,10 +73,12 @@ def create_sftlf(parameter): """Create the sftlf file from the parameter.""" sftlf = {} + print('jwlee-test_create_sftlf, parameter.test_data_set:', parameter.test_data_set) for test in parameter.test_data_set: tmp_name = getattr(parameter, "sftlf_filename_template") if tmp_name is None: # Not defined from commandline or param file tmp_name = parameter.filename_template + print('jwlee-test_create_sftlf, tmp_name:', tmp_name) sft = Base(parameter.test_data_path, tmp_name) sft.model_version = test sft.table = "fx" @@ -87,21 +90,34 @@ def create_sftlf(parameter): sft.realization = "r0i0p0" DataSet.apply_custom_keys(sft, parameter.custom_keys, "sftlf") try: + print('jwlee-test_create_sftlf, chk1') sftlf[test] = {"raw": sft.get("sftlf")} + print('jwlee-test_create_sftlf, chk1-2') sftlf[test]["filename"] = os.path.basename(sft()) + print('jwlee-test_create_sftlf, chk1-3') sftlf[test]["md5"] = sft.hash() + print('jwlee-test_create_sftlf, chk1-4') except Exception: + print('jwlee-test_create_sftlf, chk2') sftlf[test] = {"raw": None} sftlf[test]["filename"] = None sftlf[test]["md5"] = None + print('jwlee-test-target_grid-create') if parameter.target_grid == "2.5x2.5": - t_grid = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) + t_grid_cdms2 = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) + t_grid = xcdat.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) else: t_grid = parameter.target_grid + print('jwlee-test-target_grid-create done') + print('jwlee-test-target_grid-create t_grid:', t_grid) - sft = cdutil.generateLandSeaMask(t_grid) + # sft = cdutil.generateLandSeaMask(t_grid) + sft = cdutil.generateLandSeaMask(t_grid_cdms2) sft[:] = sft.filled(1.0) * 100.0 sftlf["target_grid"] = sft + print('jwlee-test-target_grid, type(sft), sft.shape:', type(sft), sft.shape) + + print("jwlee-test_create_sftlf, sftlf[test]['raw']:", sftlf[test]['raw']) return sftlf diff --git a/pcmdi_metrics/pcmdi/io.py b/pcmdi_metrics/mean_climate/deprecated/lib/io.py similarity index 100% rename from pcmdi_metrics/pcmdi/io.py rename to pcmdi_metrics/mean_climate/deprecated/lib/io.py diff --git a/pcmdi_metrics/pcmdi/mean_climate_metrics_driver.py b/pcmdi_metrics/mean_climate/deprecated/lib/mean_climate_metrics_driver.py similarity index 92% rename from pcmdi_metrics/pcmdi/mean_climate_metrics_driver.py rename to pcmdi_metrics/mean_climate/deprecated/lib/mean_climate_metrics_driver.py index 294b76f7d..c7dea489f 100644 --- a/pcmdi_metrics/pcmdi/mean_climate_metrics_driver.py +++ b/pcmdi_metrics/mean_climate/deprecated/lib/mean_climate_metrics_driver.py @@ -3,12 +3,12 @@ import json import logging -import pcmdi_metrics.driver.dataset -import pcmdi_metrics.driver.pmp_parser from 
pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.driver.model import Model -from pcmdi_metrics.driver.observation import Observation -from pcmdi_metrics.driver.outputmetrics import OutputMetrics +from pcmdi_metrics.mean_climate.lib import pmp_parser +from pcmdi_metrics.mean_climate.lib.dataset import DataSet +from pcmdi_metrics.mean_climate.lib.model import Model +from pcmdi_metrics.mean_climate.lib.observation import Observation +from pcmdi_metrics.mean_climate.lib.outputmetrics import OutputMetrics class PMPDriver(object): @@ -37,7 +37,7 @@ def __init__(self, parameter): self.var = "" self.output_metric = None self.region = "" - self.sftlf = pcmdi_metrics.driver.dataset.DataSet.create_sftlf(self.parameter) + self.sftlf = DataSet.create_sftlf(self.parameter) self.default_regions = [] self.regions_specs = {} @@ -69,7 +69,7 @@ def load_obs_dict(self): """Loads obs_info_dictionary.json and appends custom_observations from the parameter file if needed.""" obs_file_name = "obs_info_dictionary.json" - obs_json_file = pcmdi_metrics.driver.dataset.DataSet.load_path_as_file_obj( + obs_json_file = DataSet.load_path_as_file_obj( obs_file_name ) obs_dict = json.loads(obs_json_file.read()) @@ -105,8 +105,9 @@ def load_default_regions_and_regions_specs(self): """Gets the default_regions dict and regions_specs dict from default_regions.py and stores them as attributes.""" default_regions_file = ( - pcmdi_metrics.driver.dataset.DataSet.load_path_as_file_obj( + DataSet.load_path_as_file_obj( "default_regions.py" + # "default_regions_xcdat.py" ) ) exec( @@ -157,6 +158,8 @@ def run_reference_and_test_comparison(self): reference_data_set = self.parameter.reference_data_set test_data_set = self.parameter.test_data_set + print('jwlee-test-0, test_data_set:', test_data_set) + reference_data_set_is_obs = self.is_data_set_obs(reference_data_set) test_data_set_is_obs = self.is_data_set_obs(test_data_set) @@ -177,11 +180,8 @@ def run_reference_and_test_comparison(self): test_data_set, self.obs_dict, self.var ) - logging.getLogger("pcmdi_metrics").info( - "reference_data_set (adjusted): {}, test_data_set: {}".format( - reference_data_set, test_data_set - ) - ) + print('jwlee-test-1, test_data_set:', test_data_set) + print('jwlee-test-1, test_data_set_is_obs:', test_data_set_is_obs) if len(reference_data_set) == 0: # We did not find any ref!!! 
raise RuntimeError("No reference dataset found!") @@ -206,6 +206,7 @@ def run_reference_and_test_comparison(self): ) self.output_metric.add_region(self.region) try: + print('jwlee-test-1.5, test_data_set_is_obs, test, self.parameter.test_data_path:', test_data_set_is_obs, test, self.parameter.test_data_path) tst = self.determine_obs_or_model( test_data_set_is_obs, test, self.parameter.test_data_path ) @@ -221,6 +222,9 @@ def run_reference_and_test_comparison(self): break try: + print('jwlee-test-2: type(self), ref, tst:', type(self), ref, tst) + print('jwlee-test-2: self.var, self.var_name_long:', self.var, self.var_name_long) + print('jwlee-test-2: tst()[self.var].shape:', tst()[self.var].shape) self.output_metric.calculate_and_output_metrics(ref, tst) except RuntimeError: continue @@ -247,6 +251,7 @@ def is_data_set_obs(self, data_set): return data_set_is_obs def determine_obs_or_model(self, is_obs, ref_or_test, data_path): + print('jwlee-test-1.5-1: is_obs, ref_or_test, data_path:', is_obs, ref_or_test, data_path) """Actually create Observation or Module object based on if ref_or_test is an obs or model.""" if is_obs: @@ -274,7 +279,7 @@ def determine_obs_or_model(self, is_obs, ref_or_test, data_path): def create_mean_climate_parser(): - parser = pcmdi_metrics.driver.pmp_parser.PMPMetricsParser() + parser = pmp_parser.PMPMetricsParser() parser.add_argument( "--case_id", dest="case_id", diff --git a/pcmdi_metrics/driver/model.py b/pcmdi_metrics/mean_climate/deprecated/lib/model.py similarity index 86% rename from pcmdi_metrics/driver/model.py rename to pcmdi_metrics/mean_climate/deprecated/lib/model.py index 7a43becba..8a052d128 100644 --- a/pcmdi_metrics/driver/model.py +++ b/pcmdi_metrics/mean_climate/deprecated/lib/model.py @@ -5,12 +5,12 @@ import cdutil import MV2 -import pcmdi_metrics.driver.dataset from pcmdi_metrics import LOG_LEVEL from pcmdi_metrics.io.base import Base +from pcmdi_metrics.mean_climate.lib.dataset import DataSet -class Model(pcmdi_metrics.driver.dataset.DataSet): +class Model(DataSet): """Handles all the computation (setting masking, target grid, etc) and some file I/O related to models.""" @@ -44,9 +44,17 @@ def setup_target_mask(self): """Sets the mask and target_mask attribute of self._model_file""" self.var_in_file = self.get_var_in_file() + print('jwlee-test-setup_target_mask, self.var_in_file:', self.var_in_file) + print('jwlee-test-setup_target_mask, self.region:', self.region) + print('jwlee-test-setup_target_mask, self.obs_or_model:', self.obs_or_model) + if self.region is not None: region_value = self.region.get("value", None) + print('jwlee-test-setup_target_mask, region_value:', region_value) if region_value is not None: + print('jwlee-test-setup_target_mask, self.sftlf:', self.sftlf) + print('jwlee-test-setup_target_mask, self.sftlf[self.obs_or_model]:', self.sftlf[self.obs_or_model]) + print('jwlee-test-setup_target_mask, self.sftlf[self.obs_or_model]["raw"]:', self.sftlf[self.obs_or_model]["raw"]) if self.sftlf[self.obs_or_model]["raw"] is None: self.create_sftlf_model_raw(self.var_in_file) @@ -58,6 +66,7 @@ def setup_target_mask(self): def get(self): """Gets the variable based on the region and level (if given) for the file from data_path, which is defined in the initalizer.""" + print('jwlee-test-get: self.var_in_file, self.region:', self.var_in_file, self.region) try: if self.level is None: data_model = self._model_file.get( diff --git a/pcmdi_metrics/driver/observation.py b/pcmdi_metrics/mean_climate/deprecated/lib/observation.py similarity 
index 97% rename from pcmdi_metrics/driver/observation.py rename to pcmdi_metrics/mean_climate/deprecated/lib/observation.py index 99806e8df..2d7faea1c 100644 --- a/pcmdi_metrics/driver/observation.py +++ b/pcmdi_metrics/mean_climate/deprecated/lib/observation.py @@ -3,8 +3,8 @@ import MV2 from pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.driver.dataset import DataSet from pcmdi_metrics.io.base import Base +from pcmdi_metrics.mean_climate.lib.dataset import DataSet try: basestring # noqa @@ -132,6 +132,7 @@ def get(self): """Gets the variable based on the region and level (if given) for the file from data_path, which is defined in the initializer.""" try: + print('jwlee-test-observation-get, self.level:', self.level) if self.level is not None: data_obs = self._obs_file.get( self.var, level=self.level, region=self.region diff --git a/pcmdi_metrics/driver/outputmetrics.py b/pcmdi_metrics/mean_climate/deprecated/lib/outputmetrics.py similarity index 84% rename from pcmdi_metrics/driver/outputmetrics.py rename to pcmdi_metrics/mean_climate/deprecated/lib/outputmetrics.py index e69131ac0..05a6fe434 100644 --- a/pcmdi_metrics/driver/outputmetrics.py +++ b/pcmdi_metrics/mean_climate/deprecated/lib/outputmetrics.py @@ -4,11 +4,11 @@ import cdms2 -import pcmdi_metrics from pcmdi_metrics import LOG_LEVEL -from pcmdi_metrics.driver.dataset import DataSet -from pcmdi_metrics.driver.observation import Observation from pcmdi_metrics.io.base import Base +from pcmdi_metrics.mean_climate.lib import compute_metrics +from pcmdi_metrics.mean_climate.lib.dataset import DataSet +from pcmdi_metrics.mean_climate.lib.observation import Observation try: basestring # noqa @@ -120,6 +120,10 @@ def calculate_and_output_metrics(self, ref, test): self.metrics_dictionary["References"][ref.obs_or_model] = self.obs_var_ref + print('jwlee-test-calculate_and_output_metrics, self.obs_var_ref:', self.obs_var_ref) + + ref_data = None + try: ref_data = ref() except Exception as e: @@ -139,15 +143,29 @@ def calculate_and_output_metrics(self, ref, test): raise RuntimeError("Need to skip model: %s" % test.obs_or_model) # Todo: Make this a fcn - self.set_grid_in_metrics_dictionary(test_data) - - if ref_data.shape != test_data.shape: + print('jwlee-test-calculate_and_output_metrics, grid_in_metrics_dict start') + self.set_grid_in_metrics_dictionary(test_data, self.var) + print('jwlee-test-calculate_and_output_metrics, grid_in_metrics_dict done') + print('jwlee-test type(ref_data), type(test_data):', type(ref_data), type(test_data)) + print('jwlee-test ref_data:', ref_data) + print('jwlee-test test_data:', test_data) + print('jwlee-test ref_data[self.var]:', ref_data[self.var]) + print('jwlee-test test_data[self.var]:', test_data[self.var]) + print('jwlee-test ref_data[self.var].shape:', ref_data[self.var].shape) + print('jwlee-test test_data[self.var].shape:', test_data[self.var].shape) + + # if ref_data.shape != test_data.shape: + if ref_data[self.var].shape != test_data[self.var].shape: + print('jwlee-test raise runtime error') raise RuntimeError( - "Two data sets have different shapes. %s vs %s" - % (ref_data.shape, test_data.shape) + "Two data sets have different shapes. 
{} vs {}".format( + str(ref_data[self.var].shape), str(test_data[self.var].shape)) + # % (ref_data.shape, test_data.shape) ) + print('jwlee-test-calculate_and_output_metrics, set_simulation_desc start') self.set_simulation_desc(test, test_data) + print('jwlee-test-calculate_and_output_metrics, set_simulation_desc done') if ( ref.obs_or_model @@ -162,14 +180,18 @@ def calculate_and_output_metrics(self, ref, test): ].get(self.parameter.realization, {}) if not self.parameter.dry_run: - pr_rgn = pcmdi_metrics.pcmdi.compute_metrics( + print('jwlee-test-calculate_and_output_metrics, compute_metrics start') + print('jwlee-test-calculate_and_output_metrics, self.var_name_long:', self.var_name_long) + + pr_rgn = compute_metrics( self.var_name_long, test_data, ref_data ) + print('jwlee-test-calculate_and_output_metrics, compute_metrics done') # Calling compute_metrics with None for the model and obs returns # the definitions. self.metrics_def_dictionary.update( - pcmdi_metrics.pcmdi.compute_metrics(self.var_name_long, None, None) + compute_metrics(self.var_name_long, None, None) ) if hasattr(self.parameter, "compute_custom_metrics"): pr_rgn.update( @@ -204,14 +226,20 @@ def calculate_and_output_metrics(self, ref, test): else: self.write_on_exit(False) - def set_grid_in_metrics_dictionary(self, test_data): + def set_grid_in_metrics_dictionary(self, test_data, var): """Set the grid in metrics_dictionary.""" + print('jwlee-test set_grid_in_metrics_dictionary start') grid = {} grid["RegridMethod"] = self.regrid_method grid["RegridTool"] = self.regrid_tool grid["GridName"] = self.parameter.target_grid - grid["GridResolution"] = test_data.shape[1:] + print('jwlee-test set_grid_in_metrics_dictionary middle') + print('jwlee-test var:', var) + # print('jwlee-test dir(test_data):', dir(test_data)) + # grid["GridResolution"] = test_data.shape[1:] + grid["GridResolution"] = test_data[var].shape[1:] self.metrics_dictionary["GridInfo"] = grid + print('jwlee-test set_grid_in_metrics_dictionary done') def set_simulation_desc(self, test, test_data): """Fillout information for the output .json and .txt files.""" @@ -308,6 +336,8 @@ def output_interpolated_model_climatologies(self, test, test_data): clim_file.region = region_name clim_file.realization = self.parameter.realization DataSet.apply_custom_keys(clim_file, self.parameter.custom_keys, self.var) + print('jwlee-test outputmetrics clim_file.write') + print('type(test_data):', type(test_data)) clim_file.write(test_data, type="nc", id=self.var) def get_region_name_from_region(self, region): diff --git a/pcmdi_metrics/pcmdi/scripts/mean_climate_driver.py b/pcmdi_metrics/mean_climate/deprecated/mean_climate_driver.py old mode 100644 new mode 100755 similarity index 70% rename from pcmdi_metrics/pcmdi/scripts/mean_climate_driver.py rename to pcmdi_metrics/mean_climate/deprecated/mean_climate_driver.py index d431de0d3..5429360d5 --- a/pcmdi_metrics/pcmdi/scripts/mean_climate_driver.py +++ b/pcmdi_metrics/mean_climate/deprecated/mean_climate_driver.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -from pcmdi_metrics.pcmdi import PMPDriver, create_mean_climate_parser +from pcmdi_metrics.mean_climate.lib import PMPDriver, create_mean_climate_parser parser = create_mean_climate_parser() parameter = parser.get_parameter(cmd_default_vars=False, argparse_vals_only=False) diff --git a/pcmdi_metrics/mean_climate/lib/__init__.py b/pcmdi_metrics/mean_climate/lib/__init__.py new file mode 100644 index 000000000..d62544d27 --- /dev/null +++ b/pcmdi_metrics/mean_climate/lib/__init__.py @@ 
-0,0 +1,20 @@ +from .compute_metrics import compute_metrics # noqa +from .compute_statistics import ( # noqa + annual_mean, + bias_xy, + cor_xy, + mean_xy, + meanabs_xy, + rms_0, + rms_xy, + rms_xyt, + rmsc_xy, + seasonal_mean, + std_xy, + std_xyt, + zonal_mean, +) +from .create_mean_climate_parser import create_mean_climate_parser # noqa +from .load_and_regrid import load_and_regrid # noqa +from .mean_climate_metrics_to_json import mean_climate_metrics_to_json # noqa +from .calculate_climatology import calculate_climatology # noqa diff --git a/pcmdi_metrics/mean_climate/lib/calculate_climatology.py b/pcmdi_metrics/mean_climate/lib/calculate_climatology.py new file mode 100644 index 000000000..28f05920d --- /dev/null +++ b/pcmdi_metrics/mean_climate/lib/calculate_climatology.py @@ -0,0 +1,106 @@ +import datetime +import os + +import dask +from genutil import StringConstructor + +from pcmdi_metrics.io import xcdat_open + + +def calculate_climatology( + var, infile, + outfile=None, outpath=None, outfilename=None, + start=None, end=None, ver=None): + + if ver is None: + ver=datetime.datetime.now().strftime("v%Y%m%d") + + print("ver:", ver) + + infilename = infile.split("/")[-1] + print("infilename:", infilename) + + # open file + d = xcdat_open(infile, data_var=var) # wrapper of xcdat open functions to enable using xml + atts = d.attrs + + print("type(d):", type(d)) + print("atts:", atts) + + # CONTROL OF OUTPUT DIRECTORY AND FILE + out = outfile + if outpath is None: + outdir = os.path.dirname(outfile) + else: + outdir = outpath + os.makedirs(outdir, exist_ok=True) + + print("outdir:", outdir) + + # CLIM PERIOD + if (start is None) and (end is None): + # DEFAULT CLIM - BASED ON ENTIRE TIME SERIES + start_yr = int(d.time["time.year"][0]) + start_mo = int(d.time["time.month"][0]) + start_da = int(d.time["time.day"][0]) + end_yr = int(d.time["time.year"][-1]) + end_mo = int(d.time["time.month"][-1]) + end_da = int(d.time["time.day"][-1]) + else: + # USER DEFINED PERIOD + start_yr = int(start.split("-")[0]) + start_mo = int(start.split("-")[1]) + start_da = 1 + end_yr = int(end.split("-")[0]) + end_mo = int(end.split("-")[1]) + end_da = int(d.time.dt.days_in_month.sel(time=(d.time.dt.year == end_yr))[end_mo - 1]) + + start_yr_str = str(start_yr).zfill(4) + start_mo_str = str(start_mo).zfill(2) + start_da_str = str(start_da).zfill(2) + end_yr_str = str(end_yr).zfill(4) + end_mo_str = str(end_mo).zfill(2) + end_da_str = str(end_da).zfill(2) + + # Subset given time period + d = d.sel(time=slice(start_yr_str + '-' + start_mo_str + '-' + start_da_str, + end_yr_str + '-' + end_mo_str + '-' + end_da_str)) + + print("start_yr_str is ", start_yr_str) + print("start_mo_str is ", start_mo_str) + print("end_yr_str is ", end_yr_str) + print("end_mo_str is ", end_mo_str) + + # Calculate climatology + dask.config.set(**{'array.slicing.split_large_chunks': True}) + d_clim = d.temporal.climatology(var, freq="season", weighted=True, season_config={"dec_mode": "DJF", "drop_incomplete_djf": True},) + d_ac = d.temporal.climatology(var, freq="month", weighted=True) + + d_clim_dict = dict() + + d_clim_dict['DJF'] = d_clim.isel(time=0) + d_clim_dict['MAM'] = d_clim.isel(time=1) + d_clim_dict['JJA'] = d_clim.isel(time=2) + d_clim_dict['SON'] = d_clim.isel(time=3) + d_clim_dict['AC'] = d_ac + + for s in ["AC", "DJF", "MAM", "JJA", "SON"]: + addf = ( + "." + + start_yr_str + + start_mo_str + + "-" + + end_yr_str + + end_mo_str + + "." + + s + + "." 
+ + ver + + ".nc" + ) + if outfilename is not None: + out = os.path.join(outdir, outfilename) + out_season = out.replace(".nc", addf) + + print("output file is", out_season) + d_clim_dict[s].to_netcdf(out_season) # global attributes are automatically saved as well diff --git a/pcmdi_metrics/mean_climate/lib/compute_metrics.py b/pcmdi_metrics/mean_climate/lib/compute_metrics.py new file mode 100644 index 000000000..7d1e842f3 --- /dev/null +++ b/pcmdi_metrics/mean_climate/lib/compute_metrics.py @@ -0,0 +1,272 @@ +from collections import OrderedDict + +import pcmdi_metrics + + +def compute_metrics(Var, dm, do, debug=False): + # Var is sometimes sent with level associated + var = Var.split("_")[0] + # Did we send data? Or do we just want the info? + if dm is None and do is None: + metrics_defs = OrderedDict() + metrics_defs["rms_xyt"] = pcmdi_metrics.mean_climate.lib.rms_xyt(None, None) + metrics_defs["rms_xy"] = pcmdi_metrics.mean_climate.lib.rms_xy(None, None) + metrics_defs["rmsc_xy"] = pcmdi_metrics.mean_climate.lib.rmsc_xy(None, None) + metrics_defs["bias_xy"] = pcmdi_metrics.mean_climate.lib.bias_xy(None, None) + metrics_defs["mae_xy"] = pcmdi_metrics.mean_climate.lib.meanabs_xy(None, None) + metrics_defs["cor_xy"] = pcmdi_metrics.mean_climate.lib.cor_xy(None, None) + metrics_defs["mean_xy"] = pcmdi_metrics.mean_climate.lib.mean_xy(None) + metrics_defs["std_xy"] = pcmdi_metrics.mean_climate.lib.std_xy(None) + metrics_defs["std_xyt"] = pcmdi_metrics.mean_climate.lib.std_xyt(None) + + metrics_defs["seasonal_mean"] = pcmdi_metrics.mean_climate.lib.seasonal_mean( + None, None + ) + metrics_defs["annual_mean"] = pcmdi_metrics.mean_climate.lib.annual_mean( + None, None + ) + metrics_defs["zonal_mean"] = pcmdi_metrics.mean_climate.lib.zonal_mean(None, None) + return metrics_defs + + # cdms.setAutoBounds("on") + print('var: ', var) + + # unify time and time bounds between observation and model + if debug: + print('before time and time bounds unifying') + print('dm.time: ', dm['time']) + print('do.time: ', do['time']) + + # Below is temporary... 
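+ # A minimal workaround (assumption: both dm and do are 12-month annual-cycle
+ # climatologies on the same target grid): copy the observation's time coordinate
+ # and time bounds onto the model so xarray does not re-align the two datasets on
+ # unequal time labels, which would turn the element-wise statistics below into
+ # missing values.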
+ dm['time'] = do['time'] + dm[dm.time.attrs['bounds']] = do[do.time.attrs['bounds']] + + if debug: + print('after time and time bounds unifying') + print('dm.time: ', dm['time']) + print('do.time: ', do['time']) + + #if debug: + # dm.to_netcdf('dm.nc') + # do.to_netcdf('do.nc') + + metrics_dictionary = OrderedDict() + + # SET CONDITIONAL ON INPUT VARIABLE + if var == "pr": + conv = 86400.0 + else: + conv = 1.0 + + if var in ["hus"]: + sig_digits = ".5f" + else: + sig_digits = ".3f" + + # CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD + print('compute_metrics-CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD') + print('compute_metrics, rms_xyt') + rms_xyt = pcmdi_metrics.mean_climate.lib.rms_xyt(dm, do, var) + print('compute_metrics, stdObs_xyt') + stdObs_xyt = pcmdi_metrics.mean_climate.lib.std_xyt(do, var) + print('compute_metrics, std_xyt') + std_xyt = pcmdi_metrics.mean_climate.lib.std_xyt(dm, var) + + # CALCULATE ANNUAL MEANS + print('compute_metrics-CALCULATE ANNUAL MEANS') + dm_am, do_am = pcmdi_metrics.mean_climate.lib.annual_mean(dm, do, var) + + # CALCULATE ANNUAL MEAN BIAS + print('compute_metrics-CALCULATE ANNUAL MEAN BIAS') + bias_xy = pcmdi_metrics.mean_climate.lib.bias_xy(dm_am, do_am, var) + + # CALCULATE MEAN ABSOLUTE ERROR + print('compute_metrics-CALCULATE MSE') + mae_xy = pcmdi_metrics.mean_climate.lib.meanabs_xy(dm_am, do_am, var) + + # CALCULATE ANNUAL MEAN RMS (centered and uncentered) + print('compute_metrics-CALCULATE MEAN RMS') + rms_xy = pcmdi_metrics.mean_climate.lib.rms_xy(dm_am, do_am, var) + rmsc_xy = pcmdi_metrics.mean_climate.lib.rmsc_xy(dm_am, do_am, var) + + # CALCULATE ANNUAL MEAN CORRELATION + print('compute_metrics-CALCULATE MEAN CORR') + cor_xy = pcmdi_metrics.mean_climate.lib.cor_xy(dm_am, do_am, var) + + # CALCULATE ANNUAL OBS and MOD STD + print('compute_metrics-CALCULATE ANNUAL OBS AND MOD STD') + stdObs_xy = pcmdi_metrics.mean_climate.lib.std_xy(do_am, var) + std_xy = pcmdi_metrics.mean_climate.lib.std_xy(dm_am, var) + + # CALCULATE ANNUAL OBS and MOD MEAN + print('compute_metrics-CALCULATE ANNUAL OBS AND MOD MEAN') + meanObs_xy = pcmdi_metrics.mean_climate.lib.mean_xy(do_am, var) + mean_xy = pcmdi_metrics.mean_climate.lib.mean_xy(dm_am, var) + + # ZONAL MEANS ###### + # CALCULATE ANNUAL MEANS + print('compute_metrics-CALCULATE ANNUAL MEANS') + dm_amzm, do_amzm = pcmdi_metrics.mean_climate.lib.zonal_mean(dm_am, do_am, var) + + # CALCULATE ANNUAL AND ZONAL MEAN RMS + print('compute_metrics-CALCULATE ANNUAL AND ZONAL MEAN RMS') + rms_y = pcmdi_metrics.mean_climate.lib.rms_0(dm_amzm, do_amzm, var) + + # CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS + print('compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS') + dm_am_devzm = dm_am - dm_amzm + do_am_devzm = do_am - do_amzm + rms_xy_devzm = pcmdi_metrics.mean_climate.lib.rms_xy(dm_am_devzm, do_am_devzm, var, weights=dm.spatial.get_weights(axis=['X', 'Y'])) + + # CALCULATE ANNUAL AND ZONAL MEAN STD + + # CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD + print('compute_metrics-CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD') + stdObs_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(do_am_devzm, var, weights=do.spatial.get_weights(axis=['X', 'Y'])) + std_xy_devzm = pcmdi_metrics.mean_climate.lib.std_xy(dm_am_devzm, var, weights=dm.spatial.get_weights(axis=['X', 'Y'])) + + for stat in sorted([ + "std-obs_xy", + "std_xy", + "std-obs_xyt", + "std_xyt", + "std-obs_xy_devzm", + "mean_xy", + "mean-obs_xy", + "std_xy_devzm", + "rms_xyt", + "rms_xy", + 
"rmsc_xy", + "cor_xy", + "bias_xy", + "mae_xy", + "rms_y", + "rms_devzm", + ]): + metrics_dictionary[stat] = OrderedDict() + + metrics_dictionary["mean-obs_xy"]["ann"] = format(meanObs_xy * conv, sig_digits) + metrics_dictionary["mean_xy"]["ann"] = format(mean_xy * conv, sig_digits) + metrics_dictionary["std-obs_xy"]["ann"] = format(stdObs_xy * conv, sig_digits) + metrics_dictionary["std_xy"]["ann"] = format(std_xy * conv, sig_digits) + metrics_dictionary["std-obs_xyt"]["ann"] = format(stdObs_xyt * conv, sig_digits) + metrics_dictionary["std_xyt"]["ann"] = format(std_xyt * conv, sig_digits) + metrics_dictionary["std-obs_xy_devzm"]["ann"] = format( + stdObs_xy_devzm * conv, sig_digits + ) + metrics_dictionary["std_xy_devzm"]["ann"] = format(std_xy_devzm * conv, sig_digits) + metrics_dictionary["rms_xyt"]["ann"] = format(rms_xyt * conv, sig_digits) + metrics_dictionary["rms_xy"]["ann"] = format(rms_xy * conv, sig_digits) + metrics_dictionary["rmsc_xy"]["ann"] = format(rmsc_xy * conv, sig_digits) + metrics_dictionary["cor_xy"]["ann"] = format(cor_xy, sig_digits) + metrics_dictionary["bias_xy"]["ann"] = format(bias_xy * conv, sig_digits) + metrics_dictionary["mae_xy"]["ann"] = format(mae_xy * conv, sig_digits) + # ZONAL MEAN CONTRIBUTIONS + metrics_dictionary["rms_y"]["ann"] = format(rms_y * conv, sig_digits) + metrics_dictionary["rms_devzm"]["ann"] = format(rms_xy_devzm * conv, sig_digits) + + # CALCULATE SEASONAL MEANS + for sea in ["djf", "mam", "jja", "son"]: + + dm_sea = pcmdi_metrics.mean_climate.lib.seasonal_mean(dm, sea, var) + do_sea = pcmdi_metrics.mean_climate.lib.seasonal_mean(do, sea, var) + + # CALCULATE SEASONAL RMS AND CORRELATION + rms_sea = pcmdi_metrics.mean_climate.lib.rms_xy(dm_sea, do_sea, var) + rmsc_sea = pcmdi_metrics.mean_climate.lib.rmsc_xy(dm_sea, do_sea, var) + cor_sea = pcmdi_metrics.mean_climate.lib.cor_xy(dm_sea, do_sea, var) + mae_sea = pcmdi_metrics.mean_climate.lib.meanabs_xy(dm_sea, do_sea, var) + bias_sea = pcmdi_metrics.mean_climate.lib.bias_xy(dm_sea, do_sea, var) + + # CALCULATE SEASONAL OBS and MOD STD + stdObs_xy_sea = pcmdi_metrics.mean_climate.lib.std_xy(do_sea, var) + std_xy_sea = pcmdi_metrics.mean_climate.lib.std_xy(dm_sea, var) + + # CALCULATE SEASONAL OBS and MOD MEAN + meanObs_xy_sea = pcmdi_metrics.mean_climate.lib.mean_xy(do_sea, var) + mean_xy_sea = pcmdi_metrics.mean_climate.lib.mean_xy(dm_sea, var) + + metrics_dictionary["bias_xy"][sea] = format(bias_sea * conv, sig_digits) + metrics_dictionary["rms_xy"][sea] = format(rms_sea * conv, sig_digits) + metrics_dictionary["rmsc_xy"][sea] = format(rmsc_sea * conv, sig_digits) + metrics_dictionary["cor_xy"][sea] = format(cor_sea, ".2f") + metrics_dictionary["mae_xy"][sea] = format(mae_sea * conv, sig_digits) + metrics_dictionary["std-obs_xy"][sea] = format(stdObs_xy_sea * conv, sig_digits) + metrics_dictionary["std_xy"][sea] = format(std_xy_sea * conv, sig_digits) + metrics_dictionary["mean-obs_xy"][sea] = format(meanObs_xy_sea * conv, sig_digits) + metrics_dictionary["mean_xy"][sea] = format(mean_xy_sea * conv, sig_digits) + + rms_mo_l = [] + rmsc_mo_l = [] + cor_mo_l = [] + mae_mo_l = [] + bias_mo_l = [] + stdObs_xy_mo_l = [] + std_xy_mo_l = [] + meanObs_xy_mo_l = [] + mean_xy_mo_l = [] + + for n, mo in enumerate( + [ + "jan", + "feb", + "mar", + "apr", + "may", + "jun", + "jul", + "aug", + "sep", + "oct", + "nov", + "dec", + ] + ): + dm_mo = dm.isel(time=n) + do_mo = do.isel(time=n) + + # CALCULATE MONTHLY RMS AND CORRELATION + rms_mo = pcmdi_metrics.mean_climate.lib.rms_xy(dm_mo, do_mo, var) + 
rmsc_mo = pcmdi_metrics.mean_climate.lib.rmsc_xy(dm_mo, do_mo, var) + cor_mo = pcmdi_metrics.mean_climate.lib.cor_xy(dm_mo, do_mo, var) + mae_mo = pcmdi_metrics.mean_climate.lib.meanabs_xy(dm_mo, do_mo, var) + bias_mo = pcmdi_metrics.mean_climate.lib.bias_xy(dm_mo, do_mo, var) + + # CALCULATE MONTHLY OBS and MOD STD + stdObs_xy_mo = pcmdi_metrics.mean_climate.lib.std_xy(do_mo, var) + std_xy_mo = pcmdi_metrics.mean_climate.lib.std_xy(dm_mo, var) + + # CALCULATE MONTHLY OBS and MOD MEAN + meanObs_xy_mo = pcmdi_metrics.mean_climate.lib.mean_xy(do_mo, var) + mean_xy_mo = pcmdi_metrics.mean_climate.lib.mean_xy(dm_mo, var) + + rms_mo_l.append(format(rms_mo * conv, sig_digits)) + rmsc_mo_l.append(format(rmsc_mo * conv, sig_digits)) + cor_mo_l.append(format(cor_mo, ".2f")) + mae_mo_l.append(format(mae_mo * conv, sig_digits)) + bias_mo_l.append(format(bias_mo * conv, sig_digits)) + stdObs_xy_mo_l.append(format(stdObs_xy_mo * conv, sig_digits)) + std_xy_mo_l.append(format(std_xy_mo * conv, sig_digits)) + meanObs_xy_mo_l.append(format(meanObs_xy_mo * conv, sig_digits)) + mean_xy_mo_l.append(format(mean_xy_mo * conv, sig_digits)) + + metrics_dictionary["bias_xy"]["CalendarMonths"] = bias_mo_l + metrics_dictionary["rms_xy"]["CalendarMonths"] = rms_mo_l + metrics_dictionary["rmsc_xy"]["CalendarMonths"] = rmsc_mo_l + metrics_dictionary["cor_xy"]["CalendarMonths"] = cor_mo_l + metrics_dictionary["mae_xy"]["CalendarMonths"] = mae_mo_l + metrics_dictionary["std-obs_xy"]["CalendarMonths"] = stdObs_xy_mo_l + metrics_dictionary["std_xy"]["CalendarMonths"] = std_xy_mo_l + metrics_dictionary["mean-obs_xy"]["CalendarMonths"] = meanObs_xy_mo_l + metrics_dictionary["mean_xy"]["CalendarMonths"] = mean_xy_mo_l + + return metrics_dictionary + + +# ZONAL AND SEASONAL MEAN CONTRIBUTIONS +# metrics_dictionary['rms_y'][sea] = format( +# rms_y * conv, +# sig_digits) +# metrics_dictionary['rms_devzm'][sea] = format( +# rms_xy_devzm * conv, +# sig_digits) diff --git a/pcmdi_metrics/mean_climate/lib/compute_statistics.py b/pcmdi_metrics/mean_climate/lib/compute_statistics.py new file mode 100644 index 000000000..a5abdfcaa --- /dev/null +++ b/pcmdi_metrics/mean_climate/lib/compute_statistics.py @@ -0,0 +1,265 @@ +import math + +import numpy as np + + +def annual_mean(dm, do, var=None): + """Computes ANNUAL MEAN""" + if dm is None and do is None: # just want the doc + return { + "Name": "Annual Mean", + "Abstract": "Compute Annual Mean", + "Contact": "pcmdi-metrics@llnl.gov", + "Comments": "Assumes input are 12 months climatology", + } + dm_am = dm.temporal.average(var) + do_am = do.temporal.average(var) + return dm_am, do_am # DataSets + + +def seasonal_mean(d, season, var=None): + """Computes SEASONAL MEAN""" + if d is None and season is None: # just want the doc + return { + "Name": "Seasonal Mean", + "Abstract": "Compute Seasonal Mean", + "Contact": "pcmdi-metrics@llnl.gov", + "Comments": "Assumes input are 12 months climatology", + } + + mo_wts = [31, 31, 28.25, 31, 30, 31, 30, 31, 31, 30, 31, 30] + + if season == "djf": + indx = [11, 0, 1] + if season == "mam": + indx = [2, 3, 4] + if season == "jja": + indx = [5, 6, 7] + if season == "son": + indx = [8, 9, 10] + + season_num_days = mo_wts[indx[0]] + mo_wts[indx[1]] + mo_wts[indx[2]] + + d_season = ( + d.isel(time=indx[0])[var] * mo_wts[indx[0]] + + d.isel(time=indx[1])[var] * mo_wts[indx[1]] + + d.isel(time=indx[2])[var] * mo_wts[indx[2]] + ) / season_num_days + + ds_new = d.isel(time=0).copy(deep=True) + ds_new[var] = d_season + + return ds_new + + +# Metrics 
calculations + + +def bias_xy(dm, do, var=None, weights=None): + """Computes bias""" + if dm is None and do is None: # just want the doc + return { + "Name": "Bias", + "Abstract": "Compute Full Average of Model - Observation", + "Contact": "pcmdi-metrics@llnl.gov", + } + dif = dm[var] - do[var] + if weights is None: + weights = dm.spatial.get_weights(axis=['X', 'Y']) + stat = float(dif.weighted(weights).mean(("lon", "lat"))) + return float(stat) + + +def bias_xyt(dm, do, var=None): + """Computes bias""" + if dm is None and do is None: # just want the doc + return { + "Name": "Bias", + "Abstract": "Compute Full Average of Model - Observation", + "Contact": "pcmdi-metrics@llnl.gov", + } + ds = dm.copy(deep=True) + ds['dif'] = dm[var] - do[var] + stat = ds.spatial.average('dif', axis=['X', 'Y']).temporal.average('dif')['dif'].values + return float(stat) + + +def cor_xy(dm, do, var=None, weights=None): + """Computes correlation""" + if dm is None and do is None: # just want the doc + return { + "Name": "Spatial Correlation", + "Abstract": "Compute Spatial Correlation", + "Contact": "pcmdi-metrics@llnl.gov", + } + if weights is None: + weights = dm.spatial.get_weights(axis=['X', 'Y']) + + dm_avg = dm.spatial.average(var, axis=['X', 'Y'], weights=weights)[var].values + do_avg = do.spatial.average(var, axis=['X', 'Y'], weights=weights)[var].values + + covariance = ((dm[var] - dm_avg) * (do[var] - do_avg)).weighted(weights).mean(dim=['lon', 'lat']).values + std_dm = std_xy(dm, var) + std_do = std_xy(do, var) + stat = covariance / (std_dm * std_do) + + return float(stat) + + +def mean_xy(d, var=None, weights=None): + """Computes bias""" + if d is None: # just want the doc + return { + "Name": "Mean", + "Abstract": "Area Mean (area weighted)", + "Contact": "pcmdi-metrics@llnl.gov", + } + + if weights is None: + weights = d.spatial.get_weights(axis=['X', 'Y']) + stat = float(d[var].weighted(weights).mean(("lon", "lat"))) + return float(stat) + + +def meanabs_xy(dm, do, var=None, weights=None): + """Computes Mean Absolute Error""" + if dm is None and do is None: # just want the doc + return { + "Name": "Mean Absolute Error", + "Abstract": "Compute Full Average of " + + "Absolute Difference Between Model And Observation", + "Contact": "pcmdi-metrics@llnl.gov", + } + if weights is None: + weights = dm.spatial.get_weights(axis=['X', 'Y']) + dif = abs(dm[var] - do[var]) + stat = dif.weighted(weights).mean(("lon", "lat")) + return float(stat) + + +def meanabs_xyt(dm, do, var=None): + """Computes Mean Absolute Error""" + if dm is None and do is None: # just want the doc + return { + "Name": "Mean Absolute Error", + "Abstract": "Compute Full Average of " + + "Absolute Difference Between Model And Observation", + "Contact": "pcmdi-metrics@llnl.gov", + } + ds = dm.copy(deep=True) + ds['absdif'] = abs(dm[var] - do[var]) + stat = ds.spatial.average('absdif', axis=['X', 'Y']).temporal.average('absdif')['absdif'].values + return float(stat) + + +def rms_0(dm, do, var=None, weighted=True): + """Computes rms over first axis -- compare two zonal mean fields""" + if dm is None and do is None: # just want the doc + return { + "Name": "Root Mean Square over First Axis", + "Abstract": "Compute Root Mean Square over the first axis", + "Contact": "pcmdi-metrics@llnl.gov", + } + dif_square = (dm[var] - do[var])**2 + if weighted: + weights = dm.spatial.get_weights(axis=['Y']) + stat = math.sqrt(dif_square.weighted(weights).mean(("lat"))) + else: + stat = math.sqrt(dif_square.mean(("lat"))) + return float(stat) + + +def 
rms_xy(dm, do, var=None, weights=None): + """Computes rms""" + if dm is None and do is None: # just want the doc + return { + "Name": "Spatial Root Mean Square", + "Abstract": "Compute Spatial Root Mean Square", + "Contact": "pcmdi-metrics@llnl.gov", + } + dif_square = (dm[var] - do[var])**2 + if weights is None: + weights = dm.spatial.get_weights(axis=['X', 'Y']) + stat = math.sqrt(dif_square.weighted(weights).mean(("lon", "lat"))) + return float(stat) + + +def rms_xyt(dm, do, var=None): + """Computes rms""" + if dm is None and do is None: # just want the doc + return { + "Name": "Spatio-Temporal Root Mean Square", + "Abstract": "Compute Spatial and Temporal Root Mean Square", + "Contact": "pcmdi-metrics@llnl.gov", + } + ds = dm.copy(deep=True) + ds['diff_square'] = (dm[var] - do[var])**2 + ds['diff_square_sqrt'] = np.sqrt(ds.spatial.average('diff_square', axis=['X', 'Y'])['diff_square']) + stat = ds.temporal.average('diff_square_sqrt')['diff_square_sqrt'].values + return float(stat) + + +def rmsc_xy(dm, do, var=None, weights=None): + """Computes centered rms""" + if dm is None and do is None: # just want the doc + return { + "Name": "Spatial Root Mean Square", + "Abstract": "Compute Centered Spatial Root Mean Square", + "Contact": "pcmdi-metrics@llnl.gov", + } + if weights is None: + weights = dm.spatial.get_weights(axis=['X', 'Y']) + + dm_anomaly = dm[var] - dm[var].weighted(weights).mean(("lon", "lat")) + do_anomaly = do[var] - do[var].weighted(weights).mean(("lon", "lat")) + diff_square = (dm_anomaly - do_anomaly)**2 + + stat = math.sqrt(diff_square.weighted(weights).mean(("lon", "lat"))) + return float(stat) + + +def std_xy(d, var=None, weights=None): + """Computes std""" + if d is None: # just want the doc + return { + "Name": "Spatial Standard Deviation", + "Abstract": "Compute Spatial Standard Deviation", + "Contact": "pcmdi-metrics@llnl.gov", + } + if weights is None: + weights = d.spatial.get_weights(axis=['X', 'Y']) + average = float(d[var].weighted(weights).mean(("lon", "lat"))) + anomaly = (d[var] - average)**2 + variance = float(anomaly.weighted(weights).mean(("lon", "lat"))) + std = math.sqrt(variance) + return float(std) + + +def std_xyt(d, var=None): + """Computes std""" + if d is None: # just want the doc + return { + "Name": "Spatial-temporal Standard Deviation", + "Abstract": "Compute Space-Time Standard Deviation", + "Contact": "pcmdi-metrics@llnl.gov", + } + ds = d.copy(deep=True) + average = d.spatial.average(var, axis=['X', 'Y']).temporal.average(var)[var] + ds['anomaly'] = (d[var] - average)**2 + variance = ds.spatial.average('anomaly').temporal.average('anomaly')['anomaly'].values + std = math.sqrt(variance) + return(std) + + +def zonal_mean(dm, do, var=None): + """Computes ZONAL MEAN assumes rectilinear/regular grid""" + if dm is None and do is None: # just want the doc + return { + "Name": "Zonal Mean", + "Abstract": "Compute Zonal Mean", + "Contact": "pcmdi-metrics@llnl.gov", + "Comments": "", + } + dm_zm = dm.spatial.average(var, axis=['X']) + do_zm = do.spatial.average(var, axis=['X']) + return dm_zm, do_zm # DataSets diff --git a/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py b/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py new file mode 100644 index 000000000..d06447692 --- /dev/null +++ b/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py @@ -0,0 +1,259 @@ +#!/usr/bin/env python +import ast + +from pcmdi_metrics.mean_climate.lib import pmp_parser + + +def create_mean_climate_parser(): + parser = 
pmp_parser.PMPMetricsParser() + parser.add_argument( + "--case_id", + dest="case_id", + help="Defines a subdirectory to the metrics output, so multiple" + + "cases can be compared", + required=False, + ) + + parser.add_argument( + "-v", + "--vars", + type=str, + nargs="+", + dest="vars", + help="Variables to use", + required=False, + ) + + parser.add_argument( + "--regions", + type=ast.literal_eval, + dest="regions", + help="Regions on which to run the metrics", + required=False, + ) + + parser.add_argument( + "--regions_values", + type=ast.literal_eval, + dest="regions_values", + help="Users can customize regions values names", + required=False, + ) + + parser.add_argument( + "--regions_specs", + type=ast.literal_eval, + dest="regions_specs", + help="Users can customize regions", + default=None, + required=False, + ) + + parser.add_argument( + "-r", + "--reference_data_set", + type=str, + nargs="+", + dest="reference_data_set", + help="List of observations or models that are used as a " + + "reference against the test_data_set", + required=False, + ) + + parser.add_argument( + "--reference_data_path", + dest="reference_data_path", + help="Path for the reference climitologies", + required=False, + ) + + parser.add_argument( + "-t", + "--test_data_set", + type=str, + nargs="+", + dest="test_data_set", + help="List of observations or models to test " + + "against the reference_data_set", + required=False, + ) + + parser.add_argument( + "--test_data_path", + dest="test_data_path", + help="Path for the test climitologies", + required=False, + ) + + parser.add_argument( + "--target_grid", + dest="target_grid", + help='Options are "2.5x2.5" or an actual cdms2 grid object', + required=False, + ) + + parser.add_argument( + "--regrid_tool", + dest="regrid_tool", + help='Options are "regrid2" or "esmf"', + required=False, + ) + + parser.add_argument( + "--regrid_method", + dest="regrid_method", + help='Options are "linear" or "conservative", ' + + 'only if regrid_tool is "esmf"', + required=False, + ) + + parser.add_argument( + "--regrid_tool_ocn", + dest="regrid_tool_ocn", + help='Options are "regrid2" or "esmf"', + required=False, + ) + + parser.add_argument( + "--regrid_method_ocn", + dest="regrid_method_ocn", + help='Options are "linear" or "conservative", ' + + 'only if regrid_tool is "esmf"', + required=False, + ) + + parser.add_argument( + "--period", dest="period", help="A simulation parameter", required=False + ) + + parser.add_argument( + "--realization", + dest="realization", + help="A simulation parameter", + required=False, + ) + + parser.add_argument( + "--simulation_description_mapping", + type=ast.literal_eval, + dest="simulation_description_mapping", + help="List of observations or models to test " + + "against the reference_data_set", + default={}, + required=False, + ) + + parser.add_argument( + "--ext", + dest="ext", + help="Extension for the output files?", + required=False + ) + + parser.add_argument( + "--dry_run", + # If input is 'True' or 'true', return True. Otherwise False. 
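+ # argparse passes the raw command-line string to `type`, so only the literal
+ # strings "true"/"True" map to True; any other value (e.g. "1", "yes") becomes False.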
+ type=lambda x: x.lower() == "true", + dest="dry_run", + help="True if output is to be created, False otherwise", + required=False, + ) + + parser.add_argument( + "--filename_template", + dest="filename_template", + help="Template for climatology files", + required=False, + ) + + parser.add_argument( + "--sftlf_filename_template", + dest="sftlf_filename_template", + help='Filename template for landsea masks ("sftlf")', + required=False, + ) + + parser.add_argument( + "--custom_observations", + dest="custom_observations", + help="Path to an alternative, custom observation file", + required=False, + ) + + parser.add_argument( + "--metrics_output_path", + dest="metrics_output_path", + help="Directory of where to put the results", + required=False, + ) + + parser.add_argument( + "--diagnostics_output_path", + dest="diagnostics_output_path", + help="Directory of where to put the results", + default=None, + required=False, + ) + + parser.add_argument( + "--filename_output_template", + dest="filename_output_template", + help="Filename for the interpolated test climatologies", + required=False, + ) + + parser.add_argument( + "--save_test_clims", + # If input is 'True' or 'true', return True. Otherwise False. + type=lambda x: x.lower() == "true", + dest="save_test_clims", + help="True if to save interpolated test climatologies," + " otherwise False", + default=False, + required=False, + ) + + parser.add_argument( + "--test_clims_interpolated_output", + dest="test_clims_interpolated_output", + help="Directory of where to put the interpolated " + "test climatologies", + required=False, + ) + + parser.add_argument( + "--output_json_template", + help="Filename template for results json files", + required=False, + ) + + parser.add_argument( + "--user_notes", + dest="user_notes", + help="Provide a short description to help identify this run of the PMP mean climate.", + required=False, + ) + + parser.add_argument( + "--debug", + dest="debug", + action="store_true", + help="Turn on debugging mode by printing more information to track progress", + required=False, + ) + + parser.add_argument( + "--cmec", + dest="cmec", + action="store_true", + help="Save metrics in CMEC format", + required=False, + ) + + parser.add_argument( + "--no_cmec", + dest="cmec", + action="store_false", + help="Option to not save metrics in CMEC format", + required=False, + ) + + return parser \ No newline at end of file diff --git a/pcmdi_metrics/mean_climate/lib/load_and_regrid.py b/pcmdi_metrics/mean_climate/lib/load_and_regrid.py new file mode 100644 index 000000000..2e1231e1a --- /dev/null +++ b/pcmdi_metrics/mean_climate/lib/load_and_regrid.py @@ -0,0 +1,62 @@ +from pcmdi_metrics.io import xcdat_open +import cftime +import xcdat as xc +import numpy as np + +def load_and_regrid(data_path, varname, level=None, t_grid=None, decode_times=True, regrid_tool='regrid2', debug=False): + """Load data and regrid to target grid + + Args: + data_path (str): full data path for nc or xml file + varname (str): variable name + level (float): level to extract (unit in hPa) + t_grid (xarray.core.dataset.Dataset): target grid to regrid + decode_times (bool): Default is True. decode_times=False will be removed once obs4MIP written using xcdat + regrid_tool (str): Name of the regridding tool. See https://xcdat.readthedocs.io/en/stable/generated/xarray.Dataset.regridder.horizontal.html for more info + debug (bool): Default is False. 
If True, print more info to help debugging process + """ + if debug: + print('load_and_regrid start') + + # load data + ds = xcdat_open(data_path, data_var=varname, decode_times=decode_times) # NOTE: decode_times=False will be removed once obs4MIP written using xcdat + + # calendar quality check + if "calendar" in list(ds.time.attrs.keys()): + if debug: + print('ds.time.attrs["calendar"]:', ds.time.attrs["calendar"]) + if 'calendar' in ds.attrs.keys(): + if debug: + print('ds.calendar:', ds.calendar) + if ds.calendar != ds.time.attrs["calendar"]: + print('[WARNING]: calendar info mismatch. ds.time.attrs["calendar"] is adjusted to ds.calendar') + ds.time.attrs["calendar"] = ds.calendar + else: + if 'calendar' in ds.attrs.keys(): + ds.time.attrs["calendar"] = ds.calendar + + # time bound check -- add proper time bound info if cdms-generated annual cycle is loaded + if isinstance(ds.time.values[0], np.float64): # and "units" not in list(ds.time.attrs.keys()): + ds.time.attrs['units'] = "days since 0001-01-01" + ds = xc.decode_time(ds) + if debug: + print('decode_time done') + + # level - extract a specific level if needed + if level is not None: + level = level * 100 # hPa to Pa + ds = ds.sel(plev=level) + if debug: + print('ds:', ds) + + # regrid + if regrid_tool == 'regrid2': + ds_regridded = ds.regridder.horizontal(varname, t_grid, tool=regrid_tool) + elif regrid_tool in ['esmf', 'xesmf']: + regrid_tool = 'xesmf' + regrid_method = 'bilinear' + ds_regridded = ds.regridder.horizontal(varname, t_grid, tool=regrid_tool, method=regrid_method) + + if debug: + print('ds_regridded:', ds_regridded) + return ds_regridded diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py new file mode 100644 index 000000000..b614f3559 --- /dev/null +++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py @@ -0,0 +1,53 @@ +import json +from copy import deepcopy + +from pcmdi_metrics.io.base import Base + + +def mean_climate_metrics_to_json( + outdir, json_filename, result_dict, + model=None, run=None, + cmec_flag=False, debug=False +): + # Open JSON + JSON = Base( + outdir, json_filename + ) + # Dict for JSON + json_dict = deepcopy(result_dict) + if model is not None or run is not None: + # Preserve only needed dict branch -- delete rest keys + models_in_dict = list(json_dict["RESULTS"].keys()) + for m in models_in_dict: + if m == model: + for ref in list(json_dict["RESULTS"][m].keys()): + runs_in_model_dict = list(json_dict["RESULTS"][m][ref].keys()) + for r in runs_in_model_dict: + if (r != run) and (run is not None): + del json_dict["RESULTS"][m][ref][r] + else: + del json_dict["RESULTS"][m] + # Write selected dict to JSON + JSON.write( + json_dict, + json_structure=[ + "model", + "reference", + "rip", + "region", + "statistic", + "season", + ], + indent=4, + separators=(",", ": "), + mode="r+", + sort_keys=False, + ) + + if debug: + print('in mean_climate_metrics_to_json, model, run:', model, run) + print('json_dict:', json.dumps(json_dict, sort_keys=True, indent=4)) + + if cmec_flag: + print("Writing cmec file") + JSON.write_cmec(indent=4, separators=(",", ": ")) diff --git a/pcmdi_metrics/driver/pmp_parameter.py b/pcmdi_metrics/mean_climate/lib/pmp_parameter.py similarity index 100% rename from pcmdi_metrics/driver/pmp_parameter.py rename to pcmdi_metrics/mean_climate/lib/pmp_parameter.py diff --git a/pcmdi_metrics/driver/pmp_parser.py b/pcmdi_metrics/mean_climate/lib/pmp_parser.py similarity index 84% rename 
from pcmdi_metrics/driver/pmp_parser.py rename to pcmdi_metrics/mean_climate/lib/pmp_parser.py index 1f62d4d20..cc4bfed9a 100644 --- a/pcmdi_metrics/driver/pmp_parser.py +++ b/pcmdi_metrics/mean_climate/lib/pmp_parser.py @@ -2,8 +2,11 @@ import cdp.cdp_parser -import pcmdi_metrics.driver.pmp_parameter from pcmdi_metrics import resources +from pcmdi_metrics.mean_climate.lib.pmp_parameter import ( + PMPMetricsParameter, + PMPParameter, +) try: basestring # noqa @@ -21,7 +24,7 @@ def path_to_default_args(): class PMPParser(cdp.cdp_parser.CDPParser): def __init__(self, *args, **kwargs): super(PMPParser, self).__init__( - pcmdi_metrics.driver.pmp_parameter.PMPParameter, + PMPParameter, path_to_default_args(), *args, **kwargs, @@ -33,7 +36,7 @@ def __init__(self, *args, **kwargs): class PMPMetricsParser(cdp.cdp_parser.CDPParser): def __init__(self, *args, **kwargs): super(PMPMetricsParser, self).__init__( - pcmdi_metrics.driver.pmp_parameter.PMPMetricsParameter, + PMPMetricsParameter, path_to_default_args(), *args, **kwargs, diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py new file mode 100755 index 000000000..476133de0 --- /dev/null +++ b/pcmdi_metrics/mean_climate/mean_climate_driver.py @@ -0,0 +1,280 @@ +#!/usr/bin/env python + +import glob +import json +import os +from re import split +from collections import OrderedDict + +import cdms2 +import cdutil +import numpy as np +import xcdat as xc + +from pcmdi_metrics import resources +from pcmdi_metrics.io import load_regions_specs, region_subset +from pcmdi_metrics.mean_climate.lib import ( + compute_metrics, + create_mean_climate_parser, + load_and_regrid, + mean_climate_metrics_to_json, +) +from pcmdi_metrics.variability_mode.lib import tree + + +parser = create_mean_climate_parser() +parameter = parser.get_parameter(argparse_vals_only=False) + +# parameters +case_id = parameter.case_id +test_data_set = parameter.test_data_set +realization = parameter.realization +vars = parameter.vars +reference_data_set = parameter.reference_data_set +target_grid = parameter.target_grid +regrid_tool = parameter.regrid_tool +regrid_tool_ocn = parameter.regrid_tool_ocn +save_test_clims = parameter.save_test_clims +test_clims_interpolated_output = parameter.test_clims_interpolated_output +filename_template = parameter.filename_template +sftlf_filename_template = parameter.sftlf_filename_template +generate_sftlf = parameter.generate_sftlf +regions_specs = parameter.regions_specs +regions = parameter.regions +test_data_path = parameter.test_data_path +reference_data_path = parameter.reference_data_path +metrics_output_path = parameter.metrics_output_path +diagnostics_output_path = parameter.diagnostics_output_path +debug = parameter.debug +cmec = parameter.cmec + +if metrics_output_path is not None: + metrics_output_path = parameter.metrics_output_path.replace('%(case_id)', case_id) + +if diagnostics_output_path is None: + diagnostics_output_path = metrics_output_path.replace('metrics_results', 'diagnostic_results') + +diagnostics_output_path = diagnostics_output_path.replace('%(case_id)', case_id) + +find_all_realizations = False +if realization is None: + realization = "" + realizations = [realization] +elif isinstance(realization, str): + if realization.lower() in ["all", "*"]: + find_all_realizations = True + else: + realizations = [realization] + +if debug: + print('regions_specs (before loading internally defined):', regions_specs) + +if regions_specs is None or not bool(regions_specs): + 
regions_specs = load_regions_specs() + +default_regions = ['global', 'NHEX', 'SHEX', 'TROPICS'] +print( + 'case_id: ', case_id, '\n', + 'test_data_set:', test_data_set, '\n', + 'realization:', realization, '\n', + 'vars:', vars, '\n', + 'reference_data_set:', reference_data_set, '\n', + 'target_grid:', target_grid, '\n', + 'regrid_tool:', regrid_tool, '\n', + 'regrid_tool_ocn:', regrid_tool_ocn, '\n', + 'save_test_clims:', save_test_clims, '\n', + 'test_clims_interpolated_output:', test_clims_interpolated_output, '\n', + 'filename_template:', filename_template, '\n', + 'sftlf_filename_template:', sftlf_filename_template, '\n', + 'generate_sftlf:', generate_sftlf, '\n', + 'regions_specs:', regions_specs, '\n', + 'regions:', regions, '\n', + 'test_data_path:', test_data_path, '\n', + 'reference_data_path:', reference_data_path, '\n', + 'metrics_output_path:', metrics_output_path, '\n', + 'diagnostics_output_path:', diagnostics_output_path, '\n', + 'debug:', debug, '\n') + +print('--- prepare mean climate metrics calculation ---') + +# generate target grid +if target_grid == "2.5x2.5": + # target grid for regridding + t_grid = xc.create_uniform_grid(-88.875, 88.625, 2.5, 0, 357.5, 2.5) + if debug: + print('type(t_grid):', type(t_grid)) # Expected type is 'xarray.core.dataset.Dataset' + print('t_grid:', t_grid) + # identical target grid in cdms2 to use generateLandSeaMask function that is yet to exist in xcdat + t_grid_cdms2 = cdms2.createUniformGrid(-88.875, 72, 2.5, 0, 144, 2.5) + # generate land sea mask for the target grid + sft = cdutil.generateLandSeaMask(t_grid_cdms2) + if debug: + print('sft:', sft) + print('sft.getAxisList():', sft.getAxisList()) + # add sft to target grid dataset + t_grid['sftlf'] = (['lat', 'lon'], np.array(sft)) + if debug: + print('t_grid (after sftlf added):', t_grid) + t_grid.to_netcdf('target_grid.nc') + +# load obs catalogue json +egg_pth = resources.resource_path() +obs_file_name = "obs_info_dictionary.json" +obs_file_path = os.path.join(egg_pth, obs_file_name) +with open(obs_file_path) as fo: + obs_dict = json.loads(fo.read()) +# if debug: + # print('obs_dict:', json.dumps(obs_dict, indent=4, sort_keys=True)) + +print('--- start mean climate metrics calculation ---') + +# ------------- +# variable loop +# ------------- +for var in vars: + + if '_' in var or '-' in var: + varname = split('_|-', var)[0] + level = float(split('_|-', var)[1]) + else: + varname = var + level = None + + if varname not in list(regions.keys()): + regions[varname] = default_regions + + print('varname:', varname) + print('level:', level) + + # set dictionary for .json record + result_dict = tree() + + # ---------------- + # observation loop + # ---------------- + if "all" in reference_data_set: + reference_data_set = [x for x in list(obs_dict[varname].keys()) if (x == "default" or "alternate" in x)] + print("reference_data_set (all): ", reference_data_set) + + for ref in reference_data_set: + print('ref:', ref) + # identify data to load (annual cycle (AC) data is loading in) + ref_dataset_name = obs_dict[varname][ref] + ref_data_full_path = os.path.join( + reference_data_path, + obs_dict[varname][ref_dataset_name]["template"]) + print('ref_data_full_path:', ref_data_full_path) + # load data and regrid + ds_ref = load_and_regrid(ref_data_full_path, varname, level, t_grid, decode_times=False, regrid_tool=regrid_tool, debug=debug) + ds_ref_dict = OrderedDict() + + # ---------- + # model loop + # ---------- + for model in test_data_set: + + if find_all_realizations: + test_data_full_path 
= os.path.join( + test_data_path, + filename_template).replace('%(variable)', varname).replace('%(model)', model).replace('%(model_version)', model).replace('%(realization)', '*') + ncfiles = glob.glob(test_data_full_path) + realizations = [] + for ncfile in ncfiles: + realizations.append(ncfile.split('/')[-1].split('.')[3]) + print('=================================') + print('model, runs:', model, realizations) + + for run in realizations: + # identify data to load (annual cycle (AC) data is loading in) + test_data_full_path = os.path.join( + test_data_path, + filename_template).replace('%(variable)', varname).replace('%(model)', model).replace('%(model_version)', model).replace('%(realization)', run) + if os.path.exists(test_data_full_path): + print('-----------------------') + print('model, run:', model, run) + print('test_data (model in this case) full_path:', test_data_full_path) + try: + ds_test_dict = OrderedDict() + + # load data and regrid + ds_test = load_and_regrid(test_data_full_path, varname, level, t_grid, decode_times=True, regrid_tool=regrid_tool, debug=debug) + print('load and regrid done') + + # ----------- + # region loop + # ----------- + for region in regions[varname]: + print('region:', region) + + # land/sea mask -- conduct masking only for variable data array, not entire data + if ('land' in region.split('_')) or ('ocean' in region.split('_')): + ds_test_tmp = ds_test.copy(deep=True) + ds_ref_tmp = ds_ref.copy(deep=True) + if 'land' in region.split('_'): + ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] != 0.) + ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] != 0.) + elif 'ocean' in region.split('_'): + ds_test_tmp[varname] = ds_test[varname].where(t_grid['sftlf'] == 0.) + ds_ref_tmp[varname] = ds_ref[varname].where(t_grid['sftlf'] == 0.) 
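+ # note: t_grid['sftlf'] is the land fraction produced by cdutil.generateLandSeaMask
+ # (0.0 over open ocean, positive wherever a cell contains land), so `!= 0.` keeps
+ # cells with any land and `== 0.` keeps ocean-only cells; masked cells become NaN
+ # and are skipped by the area-weighted statistics computed later.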
+ print('mask done') + else: + ds_test_tmp = ds_test + ds_ref_tmp = ds_ref + + # spatial subset + if region.lower() in ['global', 'land', 'ocean']: + ds_test_dict[region] = ds_test_tmp + if region not in list(ds_ref_dict.keys()): + ds_ref_dict[region] = ds_ref_tmp + else: + ds_test_tmp = region_subset(ds_test_tmp, regions_specs, region=region) + ds_test_dict[region] = ds_test_tmp + if region not in list(ds_ref_dict.keys()): + ds_ref_dict[region] = region_subset(ds_ref_tmp, regions_specs, region=region) + print('spatial subset done') + + if save_test_clims and ref == reference_data_set[0]: + test_clims_dir = os.path.join( + diagnostics_output_path, var, 'interpolated_model_clims') + os.makedirs(test_clims_dir, exist_ok=True) + test_clims_file = os.path.join( + test_clims_dir, + '_'.join([var, model, run, 'interpolated', regrid_tool, region, 'AC', case_id + '.nc'])) + ds_test_dict[region].to_netcdf(test_clims_file) + + if debug: + print('ds_test_tmp:', ds_test_tmp) + ds_test_dict[region].to_netcdf('_'.join([var, 'model', model, run, region + '.nc'])) + if model == test_data_set[0] and run == realizations[0]: + ds_ref_dict[region].to_netcdf('_'.join([var, 'ref', region + '.nc'])) + + # compute metrics + print('compute metrics start') + result_dict["RESULTS"][model][ref][run][region] = compute_metrics(varname, ds_test_dict[region], ds_ref_dict[region], debug=debug) + + # write individual JSON + # --- single simulation, obs (need to accumulate later) / single variable + json_filename_tmp = "_".join([model, var, target_grid, regrid_tool, "metrics", ref]) + mean_climate_metrics_to_json( + os.path.join(metrics_output_path, var), + json_filename_tmp, + result_dict, + model=model, + run=run, + cmec_flag=cmec, + debug=debug + ) + + except Exception as e: + print('error occurred for ', model, run) + print(e) + + # write collective JSON --- all models / all obs / single variable + json_filename = "_".join([var, target_grid, regrid_tool, "metrics"]) + mean_climate_metrics_to_json( + metrics_output_path, + json_filename, + result_dict, + cmec_flag=cmec, + ) + print('pmp mean clim driver completed') diff --git a/pcmdi_metrics/mean_climate/param/basic_annual_cycle_param.py b/pcmdi_metrics/mean_climate/param/basic_annual_cycle_param.py new file mode 100644 index 000000000..29d40c1ea --- /dev/null +++ b/pcmdi_metrics/mean_climate/param/basic_annual_cycle_param.py @@ -0,0 +1,15 @@ +# VARIABLES TO USE +vars = ['pr'] +# vars = ['ua', 'ta'] +vars = ['pr', 'ua', 'ta'] + +# START AND END DATES FOR CLIMATOLOGY +start = '1981-01' +# end = '1983-12' +end = '2005-12' + +# INPUT DATASET - CAN BE MODEL OR OBSERVATIONS +infile = '/work/lee1043/ESGF/E3SMv2/atmos/mon/cmip6.E3SMv2.historical.r1i1p1f1.mon.%(variable).xml' + +# DIRECTORY WHERE TO PUT RESULTS +outfile = 'clim/cmip6.historical.E3SMv2.r1i1p1.mon.%(variable).nc' diff --git a/pcmdi_metrics/mean_climate/param/basic_param.py b/pcmdi_metrics/mean_climate/param/basic_param.py new file mode 100644 index 000000000..c2d68dae1 --- /dev/null +++ b/pcmdi_metrics/mean_climate/param/basic_param.py @@ -0,0 +1,81 @@ +import os + +# +# OPTIONS ARE SET BY USER IN THIS FILE AS INDICATED BELOW BY: +# +# + +# RUN IDENTIFICATION +# DEFINES A SUBDIRECTORY TO METRICS OUTPUT RESULTS SO MULTIPLE CASES CAN +# BE COMPARED +case_id = 'v20221130' + +# LIST OF MODEL VERSIONS TO BE TESTED - WHICH ARE EXPECTED TO BE PART OF +# CLIMATOLOGY FILENAME +test_data_set = ['E3SMv2'] + + +# VARIABLES TO USE +# vars = ['pr', 'ua_850'] +# vars = ['pr'] +# vars = ['ta-850'] +vars = ['ua-850'] + + +#
Observations to use at the moment "default" or "alternate" +# reference_data_set = ['all'] +reference_data_set = ['default'] +# ext = '.nc' + +# INTERPOLATION OPTIONS +target_grid = '2.5x2.5' # OPTIONS: '2.5x2.5' or an actual cdms2 grid object +regrid_tool = 'regrid2' # 'regrid2' # OPTIONS: 'regrid2','esmf' +# OPTIONS: 'linear','conservative', only if tool is esmf +regrid_method = 'linear' +regrid_tool_ocn = 'esmf' # OPTIONS: "regrid2","esmf" +# OPTIONS: 'linear','conservative', only if tool is esmf +regrid_method_ocn = 'linear' + +# SAVE INTERPOLATED MODEL CLIMATOLOGIES? +save_test_clims = True # True or False + +# DIRECTORY WHERE TO PUT INTERPOLATED MODELS' CLIMATOLOGIES +test_clims_interpolated_output = './interpolated_model_clims' + + +# Templates for climatology files +# %(param) will substitute param with values in this file +filename_template = "cmip6.historical.E3SMv2.r1i1p1.mon.%(variable).198101-200512.AC.v20221027.nc" + +# filename template for landsea masks ('sftlf') +# sftlf_filename_template = "sftlf_fx_E3SM-1-0_historical_r1i1p1f1_gr.nc" +# sftlf_filename_template = "/p/user_pub/work/CMIP6/CMIP/E3SM-Project/E3SM-2-0/piControl/r1i1p1f1/fx/sftlf/gr/v20220913/sftlf_fx_E3SM-2-0_piControl_r1i1p1f1_gr.nc" +sftlf_filename_template = "sftlf_fx_E3SM-2-0_piControl_r1i1p1f1_gr.nc" +# sftlf_filename_template = None + +generate_sftlf = False # if land surface type mask cannot be found, generate one +# generate_sftlf = True # if land surface type mask cannot be found, generate one + +# Region (if not given, default region applied: global, NHEX, SHEX, TROPICS) +regions = { + # "pr": ["global", "NHEX", "SHEX", "TROPICS", "land_NHEX", "ocean_SHEX"], + "pr": ["global"], + # "pr": ["land", "ocean", "land_TROPICS", "ocean_SHEX"], + "ua": ["global"], + "ta": ["global", "NHEX", "SHEX", "TROPICS", "land_NHEX", "ocean_SHEX"], + # "ta": ["NHEX"], + # "ta": ["land_NHEX"] + # "ta": ["global"] +} + +# ROOT PATH FOR MODELS CLIMATOLOGIES +# test_data_path = '/work/lee1043/ESGF/E3SMv2/atmos/mon' +test_data_path = './clim' +# ROOT PATH FOR OBSERVATIONS +# Note that atm/mo/%(variable)/ac will be added to this +reference_data_path = '/p/user_pub/PCMDIobs/obs4MIPs_clims' + +# DIRECTORY WHERE TO PUT RESULTS +metrics_output_path = os.path.join( + 'output', + "%(case_id)") diff --git a/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py new file mode 100755 index 000000000..30829369e --- /dev/null +++ b/pcmdi_metrics/mean_climate/param/pcmdi_MIP_EXP_pmp_parameterfile.py @@ -0,0 +1,161 @@ +import datetime +import json +import os +import sys + +ver = datetime.datetime.now().strftime('v%Y%m%d') + +# ############################################################################### +# OPTIONS ARE SET BY USER IN THIS FILE AS INDICATED BELOW BY: +# ############################################################################### +case_id = ver + +# MIP = 'cmip6' # 'CMIP6' +MIP = 'cmip5' # 'CMIP6' +exp = 'historical' +# exp = 'amip' +# exp = 'picontrol' + +user_notes = "Provenance and results" +metrics_in_single_file = 'y' # 'y' or 'n' + +cmec = False # True + +# ################################################################ + +if MIP == 'cmip6': + modver = 'v20230202' +if MIP == 'cmip5': + modver = 'v20230208' + +# LIST OF MODEL VERSIONS TO BE TESTED - WHICH ARE EXPECTED TO BE PART OF CLIMATOLOGY FILENAME + +all_mods_dic = json.load(open('all_mip_mods-' + modver + '.json')) +# all_mods_dic = ['E3SM-1-0', 'ACCESS-CM2'] + +# test_data_set
= all_mods_dic +test_data_set = all_mods_dic[MIP][exp] +test_data_set.sort() +# test_data_set = ['ACCESS-CM2'] + +print(len(test_data_set), ' ', test_data_set) +print('----------------------------------------------------------------') + +simulation_description_mapping = {"creation_date": "creation_date", "tracking_id": 'tracking_id', } + +# VARIABLES AND OBSERVATIONS TO USE + +realm = 'Amon' +# realm = 'Omon' + +vars = ['ts', 'pr'] + +# MODEL SPECIFIC PARAMETERS +model_tweaks = { + # Keys are model acronym or None which applies to all model entries + None: {"variable_mapping": {"rlwcrf1": "rlutcre1"}}, # Variable name mapping + "GFDL-ESM2G": {"variable_mapping": {"tos": "tos"}}, +} + +# Region (if not given, default region applied: global, NHEX, SHEX, TROPICS) +regions = { + # "pr": ["global", "NHEX", "SHEX", "TROPICS", "land_NHEX", "ocean_SHEX"], + "pr": ["global"], + # "pr": ["land", "ocean", "land_TROPICS", "ocean_SHEX"], + "ua": ["global"], + "ta": ["global", "NHEX", "SHEX", "TROPICS", "land_NHEX", "ocean_SHEX"], + # "ta": ["NHEX"], + # "ta": ["land_NHEX"] + # "ta": ["global"] + # "ts": ["global", "NHEX", "SHEX", "TROPICS", "ocean", "CONUS"], + # "ts": ["global"], + "ts": ["global", "CONUS"], + # "ts": ["CONUS"], +} + +# USER CAN CUSTOMIZE REGIONS VALUES NAMES +# regions_values = {"land": 100., "ocean": 0.} + +# Observations to use at the moment "default" or "alternate" +ref = 'all' +reference_data_set = ['default'] # ['default'] #, 'alternate1'] #, 'alternate', 'ref3'] +ext = '.xml' #'.nc' +ext = '.nc' + +# INTERPOLATION OPTIONS + +target_grid = '2.5x2.5' # OPTIONS: '2.5x2.5' or an actual cdms2 grid object +targetGrid = target_grid +target_grid_string = '2p5x2p5' +regrid_tool = 'regrid2' # 'esmf' #'regrid2' # OPTIONS: 'regrid2', 'esmf' +regrid_method = 'regrid2' # 'conservative' #'linear' # OPTIONS: 'linear', 'conservative', only if tool is esmf +regrid_tool_ocn = 'esmf' # OPTIONS: "regrid2", "esmf" +regrid_method_ocn = 'conservative' # OPTIONS: 'linear', 'conservative', only if tool is esmf + +# regrid_tool = 'esmf' #'esmf' #'regrid2' # OPTIONS: 'regrid2', 'esmf' +# regrid_method = 'linear' #'conservative' #'linear' # OPTIONS: 'linear', 'conservative', only if tool is esmf + +# SIMULATION PARAMETERS +period = '1981-2005' + +# realization = 'r1i1p1f1' +realization = 'all' + +# SAVE INTERPOLATED MODEL CLIMATOLOGIES ? +save_test_clims = True # True or False + +# DATA LOCATION: MODELS, OBS AND METRICS OUTPUT +# ################################################ +# Templates for climatology files + +verd = '*' +if exp == 'historical' and MIP == 'cmip5': + filename_template = MIP + '.historical.%(model).r1i1p1.mon.%(variable).198101-200512.AC.' + modver + '.nc' +if exp == 'amip' and MIP == 'cmip5': + filename_template = MIP + '.amip.%(model).r1i1p1.mon.%(variable).198101-200512.AC.' + modver + '.nc' +if exp == 'historical' and MIP == 'cmip6': + filename_template = MIP + '.historical.%(model).r1i1p1f1.mon.%(variable).198101-200512.AC.' + modver + '.nc' +if exp == 'amip' and MIP == 'cmip6': + filename_template = MIP + '.amip.%(model).r1i1p1f1.mon.%(variable).198101-200512.AC.'
+ modver + '.nc' +if exp == 'picontrol': + filename_template = "%(variable)_%(model)_%(table)_picontrol_%(exp)_r1i1p1_01-12-clim.nc" + +# Templates for MODEL land/sea mask (sftlf) +# filename template for landsea masks ('sftlf') +# sftlf_filename_template = "/work/gleckler1/processed_data/cmip5_fixed_fields/sftlf/sftlf_%(model).nc" + +generate_sftlf = True # ESTIMATE LAND SEA MASK IF NOT FOUND + +sftlf_filename_template = "cmip6.historical.%(model).sftlf.nc" # "sftlf_%(model).nc" + +# ROOT PATH FOR MODELS CLIMATOLOGIES +test_data_path = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/' + MIP + '/' + exp + '/' + modver + '/%(variable)/' + +# ROOT PATH FOR OBSERVATIONS +reference_data_path = '/p/user_pub/PCMDIobs/obs4MIPs_clims/' +custom_observations = os.path.abspath('/p/user_pub/PCMDIobs/catalogue/obs4MIPs_PCMDI_clims_byVar_catalogue_v20210816.json') +# custom_observations = './obs4MIPs_PCMDI_clims_byVar_catalogue_v20210805_ljw.json' + +print('CUSTOM OBS ARE ', custom_observations) +if not os.path.exists(custom_observations): + sys.exit() + +# ###################################### +# DIRECTORY AND FILENAME FOR OUTPUTTING METRICS RESULTS +# BY INDIVIDUAL MODELS +if metrics_in_single_file != 'y': + metrics_output_path = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/metrics_results/mean_climate/' + MIP + '/' + exp + '/%(case_id)/%(variable)%(level)/' # INDIVIDUAL MOD FILES + output_json_template = '%(model).%(variable)%(level).' + MIP + '.' + exp + '.%(regrid_method).' + target_grid_string + '.' + case_id # INDIVIDUAL MOD FILES +# ALL MODELS IN ONE FILE +if metrics_in_single_file == 'y': + metrics_output_path = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/metrics_results/mean_climate/' + MIP + '/' + exp + '/%(case_id)/' # All SAME FILE + output_json_template = '%(variable)%(level).' + MIP + '.' + exp + '.%(regrid_method).' + target_grid_string + '.' + case_id # ALL SAME FILE +# ####################################### + +# DIRECTORY WHERE TO PUT INTERPOLATED MODELS' CLIMATOLOGIES +test_clims_interpolated_output = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results' + '/interpolated_model_clims/' + MIP + '/' + exp + '/' + case_id + +# FILENAME FOR INTERPOLATED CLIMATOLOGIES OUTPUT +filename_output_template = MIP + ".%(model)." + exp + "." + realization + ".mo.%(variable)%(level).%(period).interpolated.%(regrid_method).%(region).AC." + case_id + "%(ext)" + +debug = False
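A note on how this parameter file is meant to be used: it is not executed directly but handed to the mean climate driver with -p, and it expects the model-list JSON (all_mip_mods-<modver>.json, produced by get_all_MIP_mods_from_CLIMS.py further down in this changeset) to be present in the working directory. A minimal sketch, assuming the package is installed so the console scripts are on PATH and both files sit in the current directory; the exact paths are illustrative, not taken from the repository:

    # writes all_mip_mods-v20230208.json for cmip5/historical
    python get_all_MIP_mods_from_CLIMS.py
    # run the mean climate metrics with this parameter file (see scripts/README.md below)
    mean_climate_driver.py -p pcmdi_MIP_EXP_pmp_parameterfile.py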
diff --git a/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py new file mode 100755 index 000000000..62a8c5307 --- /dev/null +++ b/pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python + +import datetime + +from genutil import StringConstructor + +from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPMetricsParser +from pcmdi_metrics.mean_climate.lib import calculate_climatology + + +ver = datetime.datetime.now().strftime("v%Y%m%d") + +P = PMPMetricsParser() + +P.add_argument( + "--vars", dest="vars", help="List of variables", nargs="+", required=False +) +P.add_argument("--infile", dest="infile", help="Defines infile", required=False) +P.add_argument( + "--outfile", dest="outfile", help="Defines output path and filename", required=False +) +P.add_argument("--outpath", dest="outpath", help="Defines outpath only", required=False) +P.add_argument( + "--outfilename", + dest="outfilename", + help="Defines out filename only", + required=False, +) +P.add_argument( + "--start", dest="start", help="Defines start year and month", required=False +) +P.add_argument("--end", dest="end", help="Defines end year and month", required=False) + +args = P.get_parameter() + +infile_template = args.infile +outfile_template = args.outfile +outpath_template = args.outpath +outfilename_template = args.outfilename +varlist = args.vars +start = args.start +end = args.end + +print("start and end are ", start, " ", end) +print("variable list: ", varlist) +print("ver:", ver) + +InFile = StringConstructor(infile_template) +OutFile = StringConstructor(outfile_template) +OutFileName = StringConstructor(outfilename_template) +OutPath = StringConstructor(outpath_template) + +for var in varlist: + # Build filenames + InFile.variable = var + OutFile.variable = var + OutFileName.variable = var + OutPath.variable = var + infile = InFile() + outfile = OutFile() + outfilename = OutFileName() + outpath = OutPath() + + print('var:', var) + print('infile:', infile) + print('outfile:', outfile) + print('outfilename:', outfilename) + print('outpath:', outpath) + + # calculate climatologies for this variable + calculate_climatology(var, infile, outfile, outpath, outfilename, start, end, ver) diff --git a/pcmdi_metrics/mean_climate/scripts/README.md b/pcmdi_metrics/mean_climate/scripts/README.md new file mode 100644 index 000000000..dc442e9fd --- /dev/null +++ b/pcmdi_metrics/mean_climate/scripts/README.md @@ -0,0 +1,15 @@ +# Run PMP Mean Climate (PCMDI internal usage) + +## Generate annual cycle files +* `allvars_parallel_mod_clims.py`: PCMDI internal script to generate annual cycle netCDF files as the first step for mean climate metrics calculation +* `mk_CRF_clims.py`: after the clims have been calculated, the cloud radiative forcing (CRF) clims need to be derived by combining the radiation variables + +## Prepare to run metrics calculations +* `get_all_MIP_mods_from_CLIMS.py`: Generate a JSON file that includes the list of models, e.g., `all_mip_mods-v20230130.json` + +## Calculate metrics +* Serial mode + * mean_climate_driver.py -p ../param/pcmdi_MIP_EXP_pmp_parameterfile.py + +## Merge individual JSON files +* post_process_merge_jsons.py diff --git a/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py new file mode 100644 index 000000000..a02df4d47 --- /dev/null +++
b/pcmdi_metrics/mean_climate/scripts/allvars_parallel_mod_clims.py @@ -0,0 +1,65 @@ +import datetime +import glob +import os + +from pcmdi_metrics.misc.scripts import parallel_submitter + + +def find_latest(path): + dir_list = [p for p in glob.glob(path + "/v????????")] + return sorted(dir_list)[-1] + + +mip = 'cmip5' +# mip = 'cmip6' +exp = 'historical' +# exp = 'amip' +# verin = 'v20230201' +data_path = find_latest("/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest") +start = '1981-01' +end = '2005-12' +numw = 20 # number of workers in parallel processing +verout = datetime.datetime.now().strftime('v%Y%m%d') + +# vars = ['rlut', 'tas', 'pr'] +# vars = ['ts', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'pr', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs'] +# vars = ['ta', 'ua', 'va', 'zg', 'hur', 'hus'] +# vars = ['ts', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'pr', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs', 'ta', 'ua', 'va', 'zg', 'hur', 'hus'] +# vars = ['ts', 'pr', 'tas', 'uas', 'vas', 'huss', 'hurs', 'psl', 'prw', 'sfcWind', 'tauu', 'tauv', 'rlut', 'rsut', 'rlutcs', 'rsutcs', 'rsdt', 'rsus', 'rsds', 'rlds', 'rlus', 'rldscs', 'rsdscs', 'ta', 'ua', 'va', 'zg', 'hur'] +vars = ['hur', 'hurs', 'huss', 'pr', 'prw', 'psl', 'rlds', 'rldscs', 'rlus', 'rlut', 'rlutcs', 'rsds', 'rsdscs', 'rsdt', 'rsus', 'rsut', 'rsutcs', 'sfcWind', 'ta', 'tas', 'tauu', 'tauv', 'ts', 'ua', 'uas', 'va', 'vas', 'zg'] +# vars = ['ts', 'pr'] + +lst1 = [] +listlog = [] + +for var in vars: + # pin = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/' + verin + '/' + mip + '/' + exp + '/atmos/mon/' + var + '/' + pin = os.path.join(data_path, mip, exp,'atmos', 'mon', var) + + lst = sorted(glob.glob(os.path.join(pin, '*r1i1p1*.xml'))) + + pathout_base = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/' + mip + '/' + exp + '/' + pathoutdir = os.path.join(pathout_base, verout, var) + + os.makedirs(pathoutdir, exist_ok=True) + + for li in lst: + + print(li.split('.')) + mod = li.split('.')[4] # model + rn = li.split('.')[5] # realization + + outfilename = mip + '.' + exp + '.' + mod + '.' + rn + '.mon.' + var + '.nc' + cmd0 = "pcmdi_compute_climatologies.py --start " + start + " --end " + end + " --infile " + + pathout = pathoutdir + '/' + outfilename + cmd = cmd0 + li + ' --outfile ' + pathout + ' --var ' + var + + lst1.append(cmd) + logf = mod + '.' + rn + '.' 
+ var listlog.append(logf) + print(logf) + +print('Number of jobs starting is ', str(len(lst1))) +parallel_submitter(lst1, log_dir='./logs/' + verout, logfilename_list=listlog, num_workers=numw) +print('done submitting') diff --git a/pcmdi_metrics/pcmdi/scripts/build_obs_meta_dictionary.py b/pcmdi_metrics/mean_climate/scripts/build_obs_meta_dictionary.py similarity index 100% rename from pcmdi_metrics/pcmdi/scripts/build_obs_meta_dictionary.py rename to pcmdi_metrics/mean_climate/scripts/build_obs_meta_dictionary.py diff --git a/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py b/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py new file mode 100755 index 000000000..5f95c08c4 --- /dev/null +++ b/pcmdi_metrics/mean_climate/scripts/get_all_MIP_mods_from_CLIMS.py @@ -0,0 +1,34 @@ +import glob +import json + +ver = 'v20230208' + +pin = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/%(MIP)/%(EXP)/' + ver + '/ts/' + +# MIPS = ['cmip6', 'cmip5'] +# exps = ['historical', 'amip'] + +MIPS = ['cmip5'] +exps = ['historical'] + +mod_dic = {} + +for mip in MIPS: + mod_dic[mip] = {} + for exp in exps: + ptmp = pin.replace('%(MIP)', mip).replace('%(EXP)', exp) + print('MIP: ', mip) + print('exp: ', exp) + print('dir: ', ptmp) + + lst = sorted(glob.glob(ptmp + '*.r1*.AC.' + ver + '.nc')) + mods = [] + for li in lst: + mod = li.split('.')[4] + if mod not in mods: + mods.append(mod) + + print(mods) + mod_dic[mip][exp] = sorted(mods) + +json.dump(mod_dic, open('all_mip_mods-' + ver + '.json', 'w'), indent=4, sort_keys=True) diff --git a/pcmdi_metrics/pcmdi/scripts/make_obs_clim.py b/pcmdi_metrics/mean_climate/scripts/make_obs_clim.py similarity index 100% rename from pcmdi_metrics/pcmdi/scripts/make_obs_clim.py rename to pcmdi_metrics/mean_climate/scripts/make_obs_clim.py diff --git a/pcmdi_metrics/pcmdi/scripts/make_obs_clim.sh b/pcmdi_metrics/mean_climate/scripts/make_obs_clim.sh similarity index 100% rename from pcmdi_metrics/pcmdi/scripts/make_obs_clim.sh rename to pcmdi_metrics/mean_climate/scripts/make_obs_clim.sh diff --git a/pcmdi_metrics/pcmdi/scripts/make_obs_sftlf.py b/pcmdi_metrics/mean_climate/scripts/make_obs_sftlf.py similarity index 100% rename from pcmdi_metrics/pcmdi/scripts/make_obs_sftlf.py rename to pcmdi_metrics/mean_climate/scripts/make_obs_sftlf.py diff --git a/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py b/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py new file mode 100755 index 000000000..924daa541 --- /dev/null +++ b/pcmdi_metrics/mean_climate/scripts/mk_CRF_clims.py @@ -0,0 +1,132 @@ +#!/usr/local/uvcdat/latest/bin/python + +import glob +import os + +import cdms2 as cdms +import MV2 as MV + +cdms.setAutoBounds('on') + +cdms.setNetcdfShuffleFlag(0) +cdms.setNetcdfDeflateFlag(0) +cdms.setNetcdfDeflateLevelFlag(0) + +exp = 'historical' +# exp = 'amip' + +# MIP = 'cmip6' # 'CMIP6' +MIP = 'cmip5' # 'CMIP5' + +if MIP == 'cmip6': + ver = 'v20230202' +if MIP == 'cmip5': + ver = 'v20230208' + +# NEED TO RUN SEPARATELY FOR LW AND SW (i.e., rsut and rlut) +radvar = 'rsut' +# radvar = 'rlut' + +pit = '/p/user_pub/pmp/pmp_results/pmp_v1.1.2/diagnostic_results/CMIP_CLIMS/' + MIP + '/' + exp + '/' + ver + '/' +pi = pit + radvar + 'cs/' + +lst = sorted(glob.glob(pi + '*' + radvar + 'cs' '*.nc')) + +for lc in lst: + try: + li = lc.replace(radvar + 'cs', radvar) + + if os.path.isfile(li): + + if radvar == 'rsut': + fixname = 'rstcre' + elif radvar == 'rlut': + fixname = 'rltcre' + + os.makedirs(pi.replace(radvar + 'cs', fixname),
exist_ok=True) + + f = cdms.open(li) + d = f(radvar) + fc = cdms.open(lc) + att_keys = fc.attributes.keys() + dc = fc(radvar + 'cs') + f.close() + fc.close() + + dgrid = d.getGrid() + + cre = MV.subtract(dc, d) + cre.setGrid(dgrid) + + cre.id = fixname + + cre.units = "W m-2" + + lo = li.replace(radvar, fixname) + + g = cdms.open(lo, 'w+') + for att in f.attributes.keys(): + setattr(g, att, f.attributes[att]) + g.write(cre) + g.close() + + print('done with ', lo) + + if radvar == 'rsut': + l1 = lc.replace('rsutcs', 'rsdt') + + try: + f1 = cdms.open(l1) + d1 = f1('rsdt') + # dif = -1.*d1 + dif = MV.subtract(d1, d) + + dif.units = 'W m-2' + dif.id = 'rst' + + l2 = l1.replace('rsdt', 'rst') + + os.makedirs(pit + '/rst', exist_ok=True) + + print('starting ', l2) + + g = cdms.open(l2, 'w+') + + for att in f1.attributes.keys(): + setattr(g, att, f1.attributes[att]) + g.write(dif) + + att_keys = f1.attributes.keys() + att_dic = {} + g.close() + f1.close() + + except Exception: + print('no rsdt ') # for ', l1 + + # ### AND FINALLY, THE NET + try: + lw = l2.replace('rst', 'rlut') + f3 = cdms.open(lw) + d3 = f3('rlut') + + net = MV.subtract(dif, d3) + net.id = 'rt' + + os.makedirs(pit + '/rt', exist_ok=True) + + ln = lw.replace('rlut', 'rt') + + g3 = cdms.open(ln, 'w+') + for att in f3.attributes.keys(): + setattr(g3, att, f3.attributes[att]) + + g3.write(net) + print('done with ', ln) + f3.close() + g3.close() + except Exception: + print('not working for ', lc) + except Exception: + print('not working for -----', lc) + pass diff --git a/pcmdi_metrics/pcmdi/scripts/obs_info_dictionary.json b/pcmdi_metrics/mean_climate/scripts/obs_info_dictionary.json similarity index 100% rename from pcmdi_metrics/pcmdi/scripts/obs_info_dictionary.json rename to pcmdi_metrics/mean_climate/scripts/obs_info_dictionary.json diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-CMOR.py b/pcmdi_metrics/mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py similarity index 99% rename from pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-CMOR.py rename to pcmdi_metrics/mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py index 954c6203c..03e2aecd0 100644 --- a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies-CMOR.py +++ b/pcmdi_metrics/mean_climate/scripts/pcmdi_compute_climatologies-CMOR.py @@ -12,7 +12,7 @@ import genutil import numpy -from pcmdi_metrics.driver.pmp_parser import PMPParser +from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPParser try: import cmor diff --git a/pcmdi_metrics/mean_climate/scripts/post_process_merge_jsons.py b/pcmdi_metrics/mean_climate/scripts/post_process_merge_jsons.py new file mode 100755 index 000000000..4ff3286f9 --- /dev/null +++ b/pcmdi_metrics/mean_climate/scripts/post_process_merge_jsons.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python + +import copy +import glob +import json +import os +import sys + +from genutil import StringConstructor + +from pcmdi_metrics.variability_mode.lib import dict_merge + + +def main(): + # mips = ['cmip5', 'cmip6'] + mips = ["cmip6"] + # mips = ['cmip3'] + + # exps = ['historical', 'amip'] + exps = ['historical'] + # exps = ["amip"] + # exps = ['20c3m', 'amip'] + # exps = ['20c3m'] + + case_id = "v20230202" + + syear = 1900 + eyear = 2005 + + obs_selection = "default" + # obs_selection = 'alternative' + + # pmprdir = '/work/lee1043/temporary/result_test' + pmprdir = "/p/user_pub/pmp/pmp_results/pmp_v1.1.2" + + for mip in mips: + for exp in exps: + variables = [s.split('/')[-1] for s in 
glob.glob(os.path.join(pmprdir, "metrics_results", "mean_climate", mip, exp, case_id, "*")) if os.path.isdir(s)] + print("variables:", variables) + for var in variables: + # json merge + #try: + if 1: + merge_json(mip, exp, case_id, var, obs_selection, syear, eyear, pmprdir) + """ + except Exception as err: + print("ERROR: ", mip, exp, var, err) + pass + """ + +def merge_json(mip, exp, case_id, var, obs, syear, eyear, pmprdir): + json_file_dir_template = ( + "metrics_results/mean_climate/%(mip)/%(exp)/%(case_id)/%(var)" + ) + json_file_dir_template = StringConstructor(json_file_dir_template) + json_file_dir = os.path.join( + pmprdir, + json_file_dir_template(mip=mip, exp=exp, case_id=case_id, var=var), + ) + + print('json_file_dir:', json_file_dir) + + json_file_template = "%(model)_%(var)_*_%(obs).json" + json_file_template = StringConstructor(json_file_template) + + # Search for individual JSONs + json_files = sorted( + glob.glob( + os.path.join( + json_file_dir, + json_file_template( + # mip=mip, + # exp=exp, + var=var, + model="*", + # run="*", + obs=obs, + ), + ) + ) + ) + + print('json_files:', json_files) + + # Remove diveDown JSONs and previously generated merged JSONs if included + json_files_revised = copy.copy(json_files) + for j, json_file in enumerate(json_files): + filename_component = json_file.split("/")[-1].split(".")[0].split("_") + if "allModels" in filename_component: + json_files_revised.remove(json_file) + elif "allRuns" in filename_component: + json_files_revised.remove(json_file) + + # Load individual JSON and merge to one big dictionary + for j, json_file in enumerate(json_files_revised): + print(j, json_file) + f = open(json_file) + dict_tmp = json.loads(f.read()) + if j == 0: + dict_final = dict_tmp.copy() + else: + dict_merge(dict_final, dict_tmp) + f.close() + + # Dump final dictionary to JSON + final_json_filename = StringConstructor("%(var)_%(mip)_%(exp)_%(case_id).json")(var=var, mip=mip, exp=exp, case_id=case_id) + final_json_file = os.path.join(json_file_dir, "..", final_json_filename) + + with open(final_json_file, "w") as fp: + json.dump(dict_final, fp, sort_keys=True, indent=4) + + +if __name__ == "__main__": + main() diff --git a/pcmdi_metrics/mjo/scripts/mjo_metrics_driver.py b/pcmdi_metrics/mjo/mjo_metrics_driver.py similarity index 99% rename from pcmdi_metrics/mjo/scripts/mjo_metrics_driver.py rename to pcmdi_metrics/mjo/mjo_metrics_driver.py index ed3a09ba5..b95bb8187 100755 --- a/pcmdi_metrics/mjo/scripts/mjo_metrics_driver.py +++ b/pcmdi_metrics/mjo/mjo_metrics_driver.py @@ -51,6 +51,9 @@ mjo_metrics_to_json, ) +from pcmdi_metrics.mean_climate.lib import pmp_parser + + # To avoid below error # OpenBLAS blas_thread_init: pthread_create failed for thread XX of 96: Resource temporarily unavailable # os.environ['OPENBLAS_NUM_THREADS'] = '1' @@ -71,7 +74,7 @@ # ================================================= # Collect user defined options # ------------------------------------------------- -P = pcmdi_metrics.driver.pmp_parser.PMPParser( +P = pmp_parser.PMPParser( description="Runs PCMDI MJO Computations", formatter_class=RawTextHelpFormatter ) P = AddParserArgument(P) diff --git a/pcmdi_metrics/pcmdi/__init__.py b/pcmdi_metrics/pcmdi/__init__.py deleted file mode 100644 index 1fc5a97e3..000000000 --- a/pcmdi_metrics/pcmdi/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -from . import annual_mean # noqa -from . import bias_xy # noqa -from . import cor_xy # noqa -from . import cor_xyt # noqa -from . import io # noqa -from . import mean_xy # noqa -from . 
import meanabs_xy # noqa -from . import pmp_parser # noqa -from . import rms_0 # noqa -from . import rms_xy # noqa -from . import rms_xyt # noqa -from . import rmsc_xy # noqa -from . import seasonal_mean # noqa -from . import std_xy # noqa -from . import std_xyt # noqa -from . import zonal_mean # noqa -from .mean_climate_metrics_calculations import compute_metrics # noqa -from .mean_climate_metrics_driver import PMPDriver, create_mean_climate_parser # noqa diff --git a/pcmdi_metrics/pcmdi/annual_mean.py b/pcmdi_metrics/pcmdi/annual_mean.py deleted file mode 100644 index c9fb5d67a..000000000 --- a/pcmdi_metrics/pcmdi/annual_mean.py +++ /dev/null @@ -1,18 +0,0 @@ -import cdms2 -import cdutil - - -def compute(dm, do): - """Computes ANNUAL MEAN""" - if dm is None and do is None: # just want the doc - return { - "Name": "Annual Mean", - "Abstract": "Compute Annual Mean", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - "Comments": "Assumes input are 12 months climatology", - } - # Do we really want this? Wouldn't it better to let it fails - cdms2.setAutoBounds("on") - return cdutil.averager(dm, axis="t"), cdutil.averager(do, axis="t") diff --git a/pcmdi_metrics/pcmdi/bias_xy.py b/pcmdi_metrics/pcmdi/bias_xy.py deleted file mode 100644 index b9d0e6bdb..000000000 --- a/pcmdi_metrics/pcmdi/bias_xy.py +++ /dev/null @@ -1,18 +0,0 @@ -import cdutil -import MV2 - - -def compute(dm, do): - """Computes bias""" - if dm is None and do is None: # just want the doc - return { - "Name": "Bias", - "Abstract": "Compute Full Average of Model - Observation", - "Contact": "pcmdi-metrics@llnl.gov", - } - dif = MV2.subtract(dm, do) - return MV2.float(cdutil.averager(dif, axis="xy", weights="weighted")) - - -# return MV2.float(MV2.average(MV2.subtract(dm, do))) deprecated - does -# not use area weights diff --git a/pcmdi_metrics/pcmdi/bias_xyt.py b/pcmdi_metrics/pcmdi/bias_xyt.py deleted file mode 100644 index 654fa5890..000000000 --- a/pcmdi_metrics/pcmdi/bias_xyt.py +++ /dev/null @@ -1,18 +0,0 @@ -import cdutil -import MV2 - - -def compute(dm, do): - """Computes bias""" - if dm is None and do is None: # just want the doc - return { - "Name": "Bias", - "Abstract": "Compute Full Average of Model - Observation", - "Contact": "pcmdi-metrics@llnl.gov", - } - dif = MV2.subtract(dm, do) - return MV2.float(cdutil.averager(dif, axis="xyt", weights="weighted")) - - -# return MV2.float(MV2.average(MV2.subtract(dm, do))) deprecated - does -# not use area weights diff --git a/pcmdi_metrics/pcmdi/cor_xy.py b/pcmdi_metrics/pcmdi/cor_xy.py deleted file mode 100644 index 985b6c03e..000000000 --- a/pcmdi_metrics/pcmdi/cor_xy.py +++ /dev/null @@ -1,14 +0,0 @@ -import genutil - - -def compute(dm, do): - """Computes correlation""" - if dm is None and do is None: # just want the doc - return { - "Name": "Spatial Correlation", - "Abstract": "Compute Spatial Correlation", - "URI": "http://uvcdat.llnl.gov/documentation/utilities/" - + "utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - return float(genutil.statistics.correlation(dm, do, axis="xy", weights="weighted")) diff --git a/pcmdi_metrics/pcmdi/cor_xyt.py b/pcmdi_metrics/pcmdi/cor_xyt.py deleted file mode 100644 index a345712d3..000000000 --- a/pcmdi_metrics/pcmdi/cor_xyt.py +++ /dev/null @@ -1,14 +0,0 @@ -import genutil - - -def compute(dm, do): - """Computes correlation""" - if dm is None and do is None: # just want the doc - return { - "Name": "Spatial and Temporal Correlation", - "Abstract": 
"Compute Spatio-Temporal Correlation", - "URI": "http://uvcdat.llnl.gov/documentation/utilities/" - + "utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - return float(genutil.statistics.correlation(dm, do, axis="xyt", weights="weighted")) diff --git a/pcmdi_metrics/pcmdi/mean_climate_metrics_calculations.py b/pcmdi_metrics/pcmdi/mean_climate_metrics_calculations.py deleted file mode 100644 index 24c94e3aa..000000000 --- a/pcmdi_metrics/pcmdi/mean_climate_metrics_calculations.py +++ /dev/null @@ -1,257 +0,0 @@ -import collections - -import cdms2 as cdms -import MV2 -from genutil import grower - -import pcmdi_metrics - - -def compute_metrics(Var, dm, do): - # Var is sometimes sent with level associated - var = Var.split("_")[0] - # Did we send data? Or do we just want the info? - if dm is None and do is None: - metrics_defs = collections.OrderedDict() - metrics_defs["rms_xyt"] = pcmdi_metrics.pcmdi.rms_xyt.compute(None, None) - metrics_defs["rms_xy"] = pcmdi_metrics.pcmdi.rms_xy.compute(None, None) - metrics_defs["rmsc_xy"] = pcmdi_metrics.pcmdi.rmsc_xy.compute(None, None) - metrics_defs["bias_xy"] = pcmdi_metrics.pcmdi.bias_xy.compute(None, None) - metrics_defs["mae_xy"] = pcmdi_metrics.pcmdi.meanabs_xy.compute(None, None) - metrics_defs["cor_xy"] = pcmdi_metrics.pcmdi.cor_xy.compute(None, None) - metrics_defs["mean_xy"] = pcmdi_metrics.pcmdi.mean_xy.compute(None) - metrics_defs["std_xy"] = pcmdi_metrics.pcmdi.std_xy.compute(None) - metrics_defs["std_xyt"] = pcmdi_metrics.pcmdi.std_xyt.compute(None) - - metrics_defs["seasonal_mean"] = pcmdi_metrics.pcmdi.seasonal_mean.compute( - None, None - ) - metrics_defs["annual_mean"] = pcmdi_metrics.pcmdi.annual_mean.compute( - None, None - ) - metrics_defs["zonal_mean"] = pcmdi_metrics.pcmdi.zonal_mean.compute(None, None) - return metrics_defs - cdms.setAutoBounds("on") - metrics_dictionary = {} - - # SET CONDITIONAL ON INPUT VARIABLE - if var == "pr": - conv = 86400.0 - else: - conv = 1.0 - - if var in ["hus"]: - sig_digits = ".5f" - else: - sig_digits = ".3f" - - # CALCULATE ANNUAL CYCLE SPACE-TIME RMS, CORRELATIONS and STD - rms_xyt = pcmdi_metrics.pcmdi.rms_xyt.compute(dm, do) - stdObs_xyt = pcmdi_metrics.pcmdi.std_xyt.compute(do) - std_xyt = pcmdi_metrics.pcmdi.std_xyt.compute(dm) - - # CALCULATE ANNUAL MEANS - dm_am, do_am = pcmdi_metrics.pcmdi.annual_mean.compute(dm, do) - - # CALCULATE ANNUAL MEAN BIAS - bias_xy = pcmdi_metrics.pcmdi.bias_xy.compute(dm_am, do_am) - - # CALCULATE MEAN ABSOLUTE ERROR - mae_xy = pcmdi_metrics.pcmdi.meanabs_xy.compute(dm_am, do_am) - - # CALCULATE ANNUAL MEAN RMS (centered and uncentered) - rms_xy = pcmdi_metrics.pcmdi.rms_xy.compute(dm_am, do_am) - rmsc_xy = pcmdi_metrics.pcmdi.rmsc_xy.compute(dm_am, do_am) - - # CALCULATE ANNUAL MEAN CORRELATION - cor_xy = pcmdi_metrics.pcmdi.cor_xy.compute(dm_am, do_am) - - # CALCULATE ANNUAL OBS and MOD STD - stdObs_xy = pcmdi_metrics.pcmdi.std_xy.compute(do_am) - std_xy = pcmdi_metrics.pcmdi.std_xy.compute(dm_am) - - # CALCULATE ANNUAL OBS and MOD MEAN - meanObs_xy = pcmdi_metrics.pcmdi.mean_xy.compute(do_am) - mean_xy = pcmdi_metrics.pcmdi.mean_xy.compute(dm_am) - - # ZONAL MEANS ###### - # CALCULATE ANNUAL MEANS - dm_amzm, do_amzm = pcmdi_metrics.pcmdi.zonal_mean.compute(dm_am, do_am) - - # CALCULATE ANNUAL AND ZONAL MEAN RMS - rms_y = pcmdi_metrics.pcmdi.rms_0.compute(dm_amzm, do_amzm) - - # CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN RMS - dm_amzm_grown, dummy = grower(dm_amzm, dm_am) - dm_am_devzm = MV2.subtract(dm_am, dm_amzm_grown) - do_amzm_grown, 
dummy = grower(do_amzm, do_am) - do_am_devzm = MV2.subtract(do_am, do_amzm_grown) - rms_xy_devzm = pcmdi_metrics.pcmdi.rms_xy.compute(dm_am_devzm, do_am_devzm) - - # CALCULATE ANNUAL AND ZONAL MEAN STD - - # CALCULATE ANNUAL MEAN DEVIATION FROM ZONAL MEAN STD - stdObs_xy_devzm = pcmdi_metrics.pcmdi.std_xy.compute(do_am_devzm) - std_xy_devzm = pcmdi_metrics.pcmdi.std_xy.compute(dm_am_devzm) - - for stat in [ - "std-obs_xy", - "std_xy", - "std-obs_xyt", - "std_xyt", - "std-obs_xy_devzm", - "mean_xy", - "mean-obs_xy", - "std_xy_devzm", - "rms_xyt", - "rms_xy", - "rmsc_xy", - "cor_xy", - "bias_xy", - "mae_xy", - "rms_y", - "rms_devzm", - ]: - metrics_dictionary[stat] = {} - - metrics_dictionary["mean-obs_xy"]["ann"] = format(meanObs_xy * conv, sig_digits) - metrics_dictionary["mean_xy"]["ann"] = format(mean_xy * conv, sig_digits) - metrics_dictionary["std-obs_xy"]["ann"] = format(stdObs_xy * conv, sig_digits) - metrics_dictionary["std_xy"]["ann"] = format(std_xy * conv, sig_digits) - metrics_dictionary["std-obs_xyt"]["ann"] = format(stdObs_xyt * conv, sig_digits) - metrics_dictionary["std_xyt"]["ann"] = format(std_xyt * conv, sig_digits) - metrics_dictionary["std-obs_xy_devzm"]["ann"] = format( - stdObs_xy_devzm * conv, sig_digits - ) - metrics_dictionary["std_xy_devzm"]["ann"] = format(std_xy_devzm * conv, sig_digits) - metrics_dictionary["rms_xyt"]["ann"] = format(rms_xyt * conv, sig_digits) - metrics_dictionary["rms_xy"]["ann"] = format(rms_xy * conv, sig_digits) - metrics_dictionary["rmsc_xy"]["ann"] = format(rmsc_xy * conv, sig_digits) - metrics_dictionary["cor_xy"]["ann"] = format(cor_xy, sig_digits) - metrics_dictionary["bias_xy"]["ann"] = format(bias_xy * conv, sig_digits) - metrics_dictionary["mae_xy"]["ann"] = format(mae_xy * conv, sig_digits) - # ZONAL MEAN CONTRIBUTIONS - metrics_dictionary["rms_y"]["ann"] = format(rms_y * conv, sig_digits) - metrics_dictionary["rms_devzm"]["ann"] = format(rms_xy_devzm * conv, sig_digits) - - # CALCULATE SEASONAL MEANS - for sea in ["djf", "mam", "jja", "son"]: - - dm_sea = pcmdi_metrics.pcmdi.seasonal_mean.compute(dm, sea) - do_sea = pcmdi_metrics.pcmdi.seasonal_mean.compute(do, sea) - - # CALCULATE SEASONAL RMS AND CORRELATION - rms_sea = pcmdi_metrics.pcmdi.rms_xy.compute(dm_sea, do_sea) - rmsc_sea = pcmdi_metrics.pcmdi.rmsc_xy.compute(dm_sea, do_sea) - cor_sea = pcmdi_metrics.pcmdi.cor_xy.compute(dm_sea, do_sea) - mae_sea = pcmdi_metrics.pcmdi.meanabs_xy.compute(dm_sea, do_sea) - bias_sea = pcmdi_metrics.pcmdi.bias_xy.compute(dm_sea, do_sea) - - # CALCULATE SEASONAL OBS and MOD STD - stdObs_xy_sea = pcmdi_metrics.pcmdi.std_xy.compute(do_sea) - std_xy_sea = pcmdi_metrics.pcmdi.std_xy.compute(dm_sea) - - # CALCULATE SEASONAL OBS and MOD MEAN - meanObs_xy_sea = pcmdi_metrics.pcmdi.mean_xy.compute(do_sea) - mean_xy_sea = pcmdi_metrics.pcmdi.mean_xy.compute(dm_sea) - - metrics_dictionary["bias_xy"][sea] = format(bias_sea * conv, sig_digits) - metrics_dictionary["rms_xy"][sea] = format(rms_sea * conv, sig_digits) - metrics_dictionary["rmsc_xy"][sea] = format(rmsc_sea * conv, sig_digits) - metrics_dictionary["cor_xy"][sea] = format(cor_sea, ".2f") - metrics_dictionary["mae_xy"][sea] = format(mae_sea * conv, sig_digits) - metrics_dictionary["std-obs_xy"][sea] = format(stdObs_xy_sea * conv, sig_digits) - metrics_dictionary["std_xy"][sea] = format(std_xy_sea * conv, sig_digits) - metrics_dictionary["mean-obs_xy"][sea] = format( - meanObs_xy_sea * conv, sig_digits - ) - metrics_dictionary["mean_xy"][sea] = format(mean_xy_sea * conv, sig_digits) - - 
rms_mo_l = [] - rmsc_mo_l = [] - cor_mo_l = [] - mae_mo_l = [] - bias_mo_l = [] - stdObs_xy_mo_l = [] - std_xy_mo_l = [] - meanObs_xy_mo_l = [] - mean_xy_mo_l = [] - - for n, mo in enumerate( - [ - "jan", - "feb", - "mar", - "apr", - "may", - "jun", - "jul", - "aug", - "sep", - "oct", - "nov", - "dec", - ] - ): - dm_mo = dm[n] - do_mo = do[n] - - # CALCULATE MONTHLY RMS AND CORRELATION - rms_mo = pcmdi_metrics.pcmdi.rms_xy.compute(dm_mo, do_mo) - rmsc_mo = pcmdi_metrics.pcmdi.rmsc_xy.compute(dm_mo, do_mo) - cor_mo = pcmdi_metrics.pcmdi.cor_xy.compute(dm_mo, do_mo) - mae_mo = pcmdi_metrics.pcmdi.meanabs_xy.compute(dm_mo, do_mo) - bias_mo = pcmdi_metrics.pcmdi.bias_xy.compute(dm_mo, do_mo) - - # CALCULATE MONTHLY OBS and MOD STD - stdObs_xy_mo = pcmdi_metrics.pcmdi.std_xy.compute(do_mo) - std_xy_mo = pcmdi_metrics.pcmdi.std_xy.compute(dm_mo) - - # CALCULATE MONTHLY OBS and MOD MEAN - meanObs_xy_mo = pcmdi_metrics.pcmdi.mean_xy.compute(do_mo) - mean_xy_mo = pcmdi_metrics.pcmdi.mean_xy.compute(dm_mo) - - rms_mo_l.append(format(rms_mo * conv, sig_digits)) - rmsc_mo_l.append(format(rmsc_mo * conv, sig_digits)) - cor_mo_l.append(format(cor_mo, ".2f")) - mae_mo_l.append(format(mae_mo * conv, sig_digits)) - bias_mo_l.append(format(bias_mo * conv, sig_digits)) - stdObs_xy_mo_l.append(format(stdObs_xy_mo * conv, sig_digits)) - std_xy_mo_l.append(format(std_xy_mo * conv, sig_digits)) - meanObs_xy_mo_l.append(format(meanObs_xy_mo * conv, sig_digits)) - mean_xy_mo_l.append(format(mean_xy_mo * conv, sig_digits)) - - # metrics_dictionary['bias_xy'][mo] = format( bias_mo * conv, sig_digits) - # metrics_dictionary['rms_xy'][mo] = format( rms_mo * conv, sig_digits) - # metrics_dictionary['rmsc_xy'][mo] = format( rmsc_mo * conv, sig_digits) - # metrics_dictionary['cor_xy'][mo] = format( cor_mo, '.2f') - # metrics_dictionary['mae_xy'][mo] = format( mae_mo * conv, sig_digits) - # metrics_dictionary['std-obs_xy'][mo] = format( stdObs_xy_mo * conv, sig_digits) - # metrics_dictionary['std_xy'][mo] = format( std_xy_mo * conv, sig_digits) - # metrics_dictionary['mean-obs_xy'][mo] = format( meanObs_xy_mo * conv, sig_digits) - # metrics_dictionary['mean_xy'][mo] = format( mean_xy_mo * conv, sig_digits) - - metrics_dictionary["bias_xy"]["CalendarMonths"] = bias_mo_l - metrics_dictionary["rms_xy"]["CalendarMonths"] = rms_mo_l - metrics_dictionary["rmsc_xy"]["CalendarMonths"] = rmsc_mo_l - metrics_dictionary["cor_xy"]["CalendarMonths"] = cor_mo_l - metrics_dictionary["mae_xy"]["CalendarMonths"] = mae_mo_l - metrics_dictionary["std-obs_xy"]["CalendarMonths"] = stdObs_xy_mo_l - metrics_dictionary["std_xy"]["CalendarMonths"] = std_xy_mo_l - metrics_dictionary["mean-obs_xy"]["CalendarMonths"] = meanObs_xy_mo_l - metrics_dictionary["mean_xy"]["CalendarMonths"] = mean_xy_mo_l - - return metrics_dictionary - - -# ZONAL AND SEASONAL MEAN CONTRIBUTIONS -# metrics_dictionary[ 'rms_y'][ sea] = format( -# rms_y * -# conv, -# sig_digits) -# metrics_dictionary[ 'rms_devzm'][ sea] = format( -# rms_xy_devzm * -# conv, -# sig_digits) - -# return metrics_dictionary diff --git a/pcmdi_metrics/pcmdi/mean_xy.py b/pcmdi_metrics/pcmdi/mean_xy.py deleted file mode 100644 index eb6779c19..000000000 --- a/pcmdi_metrics/pcmdi/mean_xy.py +++ /dev/null @@ -1,17 +0,0 @@ -import cdutil -import MV2 - - -def compute(d): - """Computes bias""" - if d is None: # just want the doc - return { - "Name": "Mean", - "Abstract": "Area Mean (area weighted)", - "Contact": "pcmdi-metrics@llnl.gov", - } - return MV2.float(cdutil.averager(d, axis="xy", 
weights="weighted")) - - -# return MV2.float(MV2.average(MV2.subtract(dm, do))) deprecated - does -# not use area weights diff --git a/pcmdi_metrics/pcmdi/meanabs_xy.py b/pcmdi_metrics/pcmdi/meanabs_xy.py deleted file mode 100644 index 58da3a31f..000000000 --- a/pcmdi_metrics/pcmdi/meanabs_xy.py +++ /dev/null @@ -1,19 +0,0 @@ -import cdutil -import MV2 - - -def compute(dm, do): - """Computes Mean Absolute Error""" - if dm is None and do is None: # just want the doc - return { - "Name": "Mean Absolute Error", - "Abstract": "Compute Full Average of " - + "Absolute Difference Between Model And Observation", - "Contact": "pcmdi-metrics@llnl.gov", - } - absdif = MV2.absolute(MV2.subtract(dm, do)) - mae = cdutil.averager(absdif, axis="xy", weights="weighted") - - # mae = MV.average(MV.absolute(MV.subtract(dm, do))) - depricated ... did - # not include area weights - return float(mae) diff --git a/pcmdi_metrics/pcmdi/meanabs_xyt.py b/pcmdi_metrics/pcmdi/meanabs_xyt.py deleted file mode 100644 index 5fc8e566f..000000000 --- a/pcmdi_metrics/pcmdi/meanabs_xyt.py +++ /dev/null @@ -1,19 +0,0 @@ -import cdutil -import MV2 - - -def compute(dm, do): - """Computes Mean Absolute Error""" - if dm is None and do is None: # just want the doc - return { - "Name": "Mean Absolute Error", - "Abstract": "Compute Full Average of " - + "Absolute Difference Between Model And Observation", - "Contact": "pcmdi-metrics@llnl.gov", - } - absdif = MV2.absolute(MV2.subtract(dm, do)) - mae = cdutil.averager(absdif, axis="xyt", weights="weighted") - - # mae = MV.average(MV.absolute(MV.subtract(dm, do))) - depricated ... did - # not include area weights - return float(mae) diff --git a/pcmdi_metrics/pcmdi/pmp_parser.py b/pcmdi_metrics/pcmdi/pmp_parser.py deleted file mode 100644 index 9442ee143..000000000 --- a/pcmdi_metrics/pcmdi/pmp_parser.py +++ /dev/null @@ -1,11 +0,0 @@ -import pcmdi_metrics.driver.pmp_parser as pmp_parser - - -class PMPParser(pmp_parser.PMPParser): - def __init__(self, warning=True, *args, **kwargs): - # conflict_handler='resolve' lets new args override older ones - super(PMPParser, self).__init__(*args, **kwargs) - if warning: - print( - "Deprecation warning: please use 'import pcmdi_metrics.driver.pmp_parser.PMPParser'" - ) diff --git a/pcmdi_metrics/pcmdi/rms_0.py b/pcmdi_metrics/pcmdi/rms_0.py deleted file mode 100644 index f0db1284b..000000000 --- a/pcmdi_metrics/pcmdi/rms_0.py +++ /dev/null @@ -1,17 +0,0 @@ -import genutil - - -def compute(dm, do): - """Computes rms over first axis""" - if dm is None and do is None: # just want the doc - return { - "Name": "Root Mean Square over First Axis", - "Abstract": "Compute Root Mean Square over the first axis", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - if 1 in [x.isLevel() for x in dm.getAxisList()]: - dm = dm(squeeze=1) - do = do(squeeze=1) - return float(genutil.statistics.rms(dm, do)) diff --git a/pcmdi_metrics/pcmdi/rms_xy.py b/pcmdi_metrics/pcmdi/rms_xy.py deleted file mode 100644 index 06c477524..000000000 --- a/pcmdi_metrics/pcmdi/rms_xy.py +++ /dev/null @@ -1,14 +0,0 @@ -import genutil - - -def compute(dm, do): - """Computes rms""" - if dm is None and do is None: # just want the doc - return { - "Name": "Spatial Root Mean Square", - "Abstract": "Compute Spatial Root Mean Square", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - return float(genutil.statistics.rms(dm, do, axis="xy", 
weights="weighted")) diff --git a/pcmdi_metrics/pcmdi/rms_xyt.py b/pcmdi_metrics/pcmdi/rms_xyt.py deleted file mode 100644 index 7d0eae0b8..000000000 --- a/pcmdi_metrics/pcmdi/rms_xyt.py +++ /dev/null @@ -1,14 +0,0 @@ -import genutil - - -def compute(dm, do): - """Computes rms""" - if dm is None and do is None: # just want the doc - return { - "Name": "Spatio-Temporal Root Mean Square", - "Abstract": "Compute Spatial and Temporal Root Mean Square", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - return float(genutil.statistics.rms(dm, do, axis="xyt", weights="weighted")) diff --git a/pcmdi_metrics/pcmdi/rmsc_xy.py b/pcmdi_metrics/pcmdi/rmsc_xy.py deleted file mode 100644 index 125e57de2..000000000 --- a/pcmdi_metrics/pcmdi/rmsc_xy.py +++ /dev/null @@ -1,16 +0,0 @@ -import genutil - - -def compute(dm, do): - """Computes centered rms""" - if dm is None and do is None: # just want the doc - return { - "Name": "Spatial Root Mean Square", - "Abstract": "Compute Centered Spatial Root Mean Square", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - return float( - genutil.statistics.rms(dm, do, axis="xy", centered=1, weights="weighted") - ) diff --git a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py b/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py deleted file mode 100644 index f17bb030d..000000000 --- a/pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py +++ /dev/null @@ -1,214 +0,0 @@ -#!/usr/bin/env python -import datetime - -import cdms2 -from genutil import StringConstructor - -import pcmdi_metrics - -ver = datetime.datetime.now().strftime("v%Y%m%d") - -cdms2.setNetcdfShuffleFlag(0) -cdms2.setNetcdfDeflateFlag(0) -cdms2.setNetcdfDeflateLevelFlag(0) - -# - - -def clim_calc(var, infile, outfile, outdir, outfilename, start, end): - import datetime - import os - - import cdms2 - import cdtime - import cdutil - - ver = datetime.datetime.now().strftime("v%Y%m%d") - - lf = infile - tmp = lf.split("/") - infilename = tmp[len(tmp) - 1] - print("infilename is ", infilename) - - f = cdms2.open(lf) - atts = f.listglobal() - outfd = outfile - - # CONTROL OF OUTPUT DIRECTORY AND FILE - - # outdir AND outfilename PROVIDED BY USER - if outdir is not None and outfilename is not None: - outfd = outdir + outfilename - - # outdir PROVIDED BY USER, BUT filename IS TAKEN FROM infilename WITH CLIM MODIFICATIONS SUFFIX ADDED BELOW - if outdir is not None and outfilename is None: - outfd = outdir + "/" + infilename - - if outdir is None and outfilename is None: - outfd = outfile - - print("outfd is ", outfd) - print("outdir is ", outdir) - - seperate_clims = "y" - - # DEFAULT CLIM - BASED ON ENTIRE TIME SERIES - if (start is None) and (end is None): - d = f(var) - t = d.getTime() - c = t.asComponentTime() - start_yr_str = str(c[0].year) - start_mo_str = str(c[0].month) - end_yr_str = str(c[len(c) - 1].year) - end_mo_str = str(c[len(c) - 1].month) - start_yr = int(start_yr_str) - start_mo = int(start_mo_str) - end_yr = int(end_yr_str) - end_mo = int(end_mo_str) - - # USER DEFINED PERIOD - else: - start_mo = int(start.split("-")[1]) - start_yr = int(start.split("-")[0]) - end_mo = int(end.split("-")[1]) - end_yr = int(end.split("-")[0]) - start_yr_str = str(start_yr) - start_mo_str = str(start_mo) - end_yr_str = str(end_yr) - end_mo_str = str(end_mo) - - d = f( - var, time=(cdtime.comptime(start_yr, start_mo), cdtime.comptime(end_yr, 
end_mo)) - ) - - print("start_yr_str is ", start_yr_str) - - if start_mo_str not in ["11", "12"]: - start_mo_str = "0" + start_mo_str - if end_mo_str not in ["11", "12"]: - end_mo_str = "0" + end_mo_str - - d_ac = cdutil.ANNUALCYCLE.climatology(d).astype("float32") - d_djf = cdutil.DJF.climatology(d)(squeeze=1).astype("float32") - d_jja = cdutil.JJA.climatology(d)(squeeze=1).astype("float32") - d_son = cdutil.SON.climatology(d)(squeeze=1).astype("float32") - d_mam = cdutil.MAM.climatology(d)(squeeze=1).astype("float32") - - for v in [d_ac, d_djf, d_jja, d_son, d_mam]: - - v.id = var - - for s in ["AC", "DJF", "MAM", "JJA", "SON"]: - - addf = ( - "." - + start_yr_str - + start_mo_str - + "-" - + end_yr_str - + end_mo_str - + "." - + s - + "." - + ver - + ".nc" - ) - - if seperate_clims == "y": - print("outfd is ", outfd) - out = outfd - out = out.replace(".nc", addf) - out = out.replace(".xml", addf) - print("out is ", out) - - if seperate_clims == "n": - out = outfd.replace("climo.nc", s + ".nc") - if s == "AC": - do = d_ac - if s == "DJF": - do = d_djf - if s == "MAM": - do = d_mam - if s == "JJA": - do = d_jja - if s == "SON": - do = d_son - do.id = var - - # MKDIRS AS NEEDED - lst = outfd.split("/") - s = "/" - for ll in range(len(lst)): - d = s.join(lst[0 : ll + 1]) - try: - os.mkdir(d) - except OSError: - pass - - g = cdms2.open(out, "w+") - g.write(do) - - for att in atts: - setattr(g, att, f.getglobal(att)) - g.close() - print(do.shape, " ", d_ac.shape, " ", out) - f.close() - return - - -####################################################################### - - -P = pcmdi_metrics.driver.pmp_parser.PMPMetricsParser() - - -P.add_argument( - "--vars", dest="vars", help="List of variables", nargs="+", required=False -) -P.add_argument("--infile", dest="infile", help="Defines infile", required=False) -P.add_argument( - "--outfile", dest="outfile", help="Defines output path and filename", required=False -) -P.add_argument("--outpath", dest="outpath", help="Defines outpath only", required=False) -P.add_argument( - "--outfilename", - dest="outfilename", - help="Defines out filename only", - required=False, -) -P.add_argument( - "--start", dest="start", help="Defines start year and month", required=False -) -P.add_argument("--end", dest="end", help="Defines end year and month", required=False) - -args = P.get_parameter() - -infile_template = args.infile -outfile_template = args.outfile -outpath_template = args.outpath -outfilename_template = args.outfilename -varlist = args.vars -start = args.start -end = args.end - -print("start and end are ", start, " ", end) -print("variable list: ", varlist) - -InFile = StringConstructor(infile_template) -OutFile = StringConstructor(outfile_template) -OutFileName = StringConstructor(outfilename_template) -OutPath = StringConstructor(outpath_template) - -for var in varlist: - # Build filenames - InFile.variable = var - OutFile.variable = var - OutFileName.variable = var - OutPath.variable = var - infile = InFile() - outfile = OutFile() - outfilename = OutFileName() - outpath = OutPath() - - # calculate climatologies for this variable - clim_calc(var, infile, outfile, outpath, outfilename, start, end) diff --git a/pcmdi_metrics/pcmdi/seasonal_mean.py b/pcmdi_metrics/pcmdi/seasonal_mean.py deleted file mode 100644 index 2220ae64e..000000000 --- a/pcmdi_metrics/pcmdi/seasonal_mean.py +++ /dev/null @@ -1,30 +0,0 @@ -def compute(d, sea): - """Computes SEASONAL MEAN""" - if d is None and sea is None: # just want the doc - return { - "Name": "Seasonal Mean", - 
"Abstract": "Compute Seasonal Mean", - "Contact": "pcmdi-metrics@llnl.gov", - "Comments": "Assumes input are 12 months climatology", - } - - mo_wts = [31, 31, 28.25, 31, 30, 31, 30, 31, 31, 30, 31, 30] - - if sea == "djf": - indx = [11, 0, 1] - if sea == "mam": - indx = [2, 3, 4] - if sea == "jja": - indx = [5, 6, 7] - if sea == "son": - indx = [8, 9, 10] - - sea_no_days = mo_wts[indx[0]] + mo_wts[indx[1]] + mo_wts[indx[2]] - - d_sea = ( - d[indx[0]] * mo_wts[indx[0]] - + d[indx[1]] * mo_wts[indx[1]] - + d[indx[2]] * mo_wts[indx[2]] - ) / sea_no_days - - return d_sea diff --git a/pcmdi_metrics/pcmdi/std_xy.py b/pcmdi_metrics/pcmdi/std_xy.py deleted file mode 100644 index ada831346..000000000 --- a/pcmdi_metrics/pcmdi/std_xy.py +++ /dev/null @@ -1,14 +0,0 @@ -import genutil - - -def compute(d): - """Computes std""" - if d is None: # just want the doc - return { - "Name": "Spatial Standard Deviation", - "Abstract": "Compute Spatial Standard Deviation", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - return float(genutil.statistics.std(d, axis="xy", weights="weighted")) diff --git a/pcmdi_metrics/pcmdi/std_xyt.py b/pcmdi_metrics/pcmdi/std_xyt.py deleted file mode 100644 index 1bb04a0b9..000000000 --- a/pcmdi_metrics/pcmdi/std_xyt.py +++ /dev/null @@ -1,14 +0,0 @@ -import genutil - - -def compute(d): - """Computes std""" - if d is None: # just want the doc - return { - "Name": "Spatial-temporal Standard Deviation", - "Abstract": "Compute Space-Time Standard Deviation", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - } - return float(genutil.statistics.std(d, axis="xyt", weights="weighted")) diff --git a/pcmdi_metrics/pcmdi/zonal_mean.py b/pcmdi_metrics/pcmdi/zonal_mean.py deleted file mode 100644 index 3084c5a04..000000000 --- a/pcmdi_metrics/pcmdi/zonal_mean.py +++ /dev/null @@ -1,15 +0,0 @@ -import cdutil - - -def compute(dm, do): - """Computes ZONAL MEAN assumes rectilinear/regular grid""" - if dm is None and do is None: # just want the doc - return { - "Name": "Zonal Mean", - "Abstract": "Compute Zonal Mean", - "URI": "http://uvcdat.llnl.gov/documentation/" - + "utilities/utilities-2.html", - "Contact": "pcmdi-metrics@llnl.gov", - "Comments": "", - } - return cdutil.averager(dm, axis="x"), cdutil.averager(do, axis="x") diff --git a/pcmdi_metrics/precip_variability/variability_across_timescales_PS_driver.py b/pcmdi_metrics/precip_variability/variability_across_timescales_PS_driver.py index bf2af43ca..046746438 100644 --- a/pcmdi_metrics/precip_variability/variability_across_timescales_PS_driver.py +++ b/pcmdi_metrics/precip_variability/variability_across_timescales_PS_driver.py @@ -5,7 +5,7 @@ from genutil import StringConstructor -from pcmdi_metrics.driver.pmp_parser import PMPParser +from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPParser from pcmdi_metrics.precip_variability.lib import ( AddParserArgument, precip_variability_across_timescale, diff --git a/pcmdi_metrics/variability_mode/variability_modes_driver.py b/pcmdi_metrics/variability_mode/variability_modes_driver.py index 318db1bfa..a94ac6d0d 100755 --- a/pcmdi_metrics/variability_mode/variability_modes_driver.py +++ b/pcmdi_metrics/variability_mode/variability_modes_driver.py @@ -83,6 +83,8 @@ variability_metrics_to_json, write_nc_output, ) +from pcmdi_metrics.mean_climate.lib import pmp_parser + # To avoid below error # OpenBLAS blas_thread_init: pthread_create failed 
for thread XX of 96: Resource temporarily unavailable @@ -106,7 +108,7 @@ # ================================================= # Collect user defined options # ------------------------------------------------- -P = pcmdi_metrics.driver.pmp_parser.PMPParser( +P = pmp_parser.PMPParser( description="Runs PCMDI Modes of Variability Computations", formatter_class=RawTextHelpFormatter, ) diff --git a/setup.py b/setup.py index 12f7e392f..4a1979815 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,5 @@ from __future__ import print_function -import glob -import os import subprocess import sys @@ -51,19 +49,19 @@ p = subprocess.Popen(["python", "setup_default_args.py"], cwd="share") p.communicate() -packages = find_packages(exclude=["cmec", "tests"]) +packages = find_packages(exclude=["cmec", "tests"], include=["pcmdi_metrics*"]) scripts = [ - "pcmdi_metrics/pcmdi/scripts/mean_climate_driver.py", - "pcmdi_metrics/pcmdi/scripts/pcmdi_compute_climatologies.py", - "pcmdi_metrics/misc/scripts/parallelize_driver.py", - "pcmdi_metrics/misc/scripts/get_pmp_data.py", + "pcmdi_metrics/mean_climate/pcmdi_compute_climatologies.py", + "pcmdi_metrics/mean_climate/mean_climate_driver.py", "pcmdi_metrics/monsoon_wang/scripts/mpindex_compute.py", "pcmdi_metrics/monsoon_sperber/scripts/driver_monsoon_sperber.py", - "pcmdi_metrics/mjo/scripts/mjo_metrics_driver.py", + "pcmdi_metrics/mjo/mjo_metrics_driver.py", "pcmdi_metrics/variability_mode/variability_modes_driver.py", "pcmdi_metrics/enso/enso_driver.py", "pcmdi_metrics/precip_variability/variability_across_timescales_PS_driver.py", + "pcmdi_metrics/misc/scripts/parallelize_driver.py", + "pcmdi_metrics/misc/scripts/get_pmp_data.py", "pcmdi_metrics/precip_distribution/precip_distribution_driver.py", "pcmdi_metrics/cloud_feedback/cloud_feedback_driver.py", ] @@ -116,35 +114,6 @@ ), ) -if install_dev: - print("Adding experimental packages") - dev_packages = glob.glob("src/python/devel/*") - dev_packages.remove("src/python/devel/example_dev") - for p in dev_packages: - if not os.path.isdir(p): - dev_packages.pop(p) - dev_scripts = [] - for p in dev_packages: - scripts = glob.glob(os.path.join(p, "scripts", "*")) - dev_scripts += scripts - dev_pkg = {} - dev_data = [] - for p in dev_packages: - nm = p.replace("/", ".") - nm = nm.replace("src.python.devel", "pcmdi_metrics") - pnm = nm.split(".")[-1] - pkg_dir = os.path.join(p, "lib") - dev_pkg[nm] = pkg_dir - data = glob.glob(os.path.join(p, "data", "*")) - for d in data: - dir_nm = os.path.split(d)[-1] - dev_data.append( - [os.path.join(dir_nm, pnm), glob.glob(os.path.join(d, "*"))] - ) - packages.update(dev_pkg) - data_files += dev_data - scripts += dev_scripts - setup( name="pcmdi_metrics", version=release_version, @@ -155,14 +124,4 @@ scripts=scripts, data_files=data_files, entry_points=entry_points, - # include_dirs = [numpy.lib.utils.get_include()], - # ext_modules = [ - # Extension('pcmdi_metrics.exts', - # ['src/C/add.c',], - # library_dirs = [], - # libraries = [], - # define_macros = [], - # extra_compile_args = [], - # extra_link_args = [], - # ] )
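Taken together, the relocated mean_climate entry points follow the workflow described in scripts/README.md: build annual-cycle climatology files, run the mean climate driver against a parameter file, then merge the per-model JSONs. A rough end-to-end sketch, assuming the package is installed so the console scripts registered in setup.py are on PATH; the input template and output locations below are placeholders, not values taken from this changeset:

    # 1) annual-cycle climatologies (flags as defined in pcmdi_compute_climatologies.py)
    pcmdi_compute_climatologies.py --vars pr --start 1981-01 --end 2005-12 \
        --infile 'input/mydata.mon.%(variable).nc' \
        --outfile 'clim/mydata.mon.%(variable).nc'

    # 2) mean climate metrics driven by a parameter file (scripts/README.md, "Serial mode")
    mean_climate_driver.py -p pcmdi_metrics/mean_climate/param/basic_param.py

    # 3) merge the individual metrics JSONs into one file per variable, after editing the
    #    hard-coded mips/exps/case_id/pmprdir settings at the top of its main()
    python pcmdi_metrics/mean_climate/scripts/post_process_merge_jsons.py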