From 3ccffeee120340ab580fc9d96b552970c9f42a8f Mon Sep 17 00:00:00 2001
From: Rahul Mahajan
Date: Mon, 18 Mar 2024 09:30:35 -0400
Subject: [PATCH] Parse jediyaml only once (#2387)

`JEDIYAML` was being parsed 3 times: once in `get_obs_dict`, a second time in
`get_bias_dict`, and a third time in `initialize` for the specific component
analysis task.

This PR:
- eliminates the duplication and constructs the `jedi_config` dictionary just
  once; the dictionary is written out before calling the executable.
- updates the gdasapp submodule hash.
- updates the snow, aerosol, atmvar and atmens JEDI-DA configs to include
  `JEDI_FIX_YAML` and `CRTM_FIX_YAML`. This allows greater flexibility and
  control over the contents of the fix data sets that are copied into the run
  directory.
- combines the snowDA and aerosolDA tests into a single CI case.

Co-authored-by: Cory Martin
Co-authored-by: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com>
---
 ci/Jenkinsfile | 4 +-
 ..._atmsnowDA.yaml => C96_atmaerosnowDA.yaml} | 4 +-
 ...ci.yaml => atmaerosnowDA_defaults_ci.yaml} | 0
 parm/config/gfs/config.aeroanl | 16 +-
 parm/config/gfs/config.aeroanlfinal | 2 +-
 parm/config/gfs/config.aeroanlinit | 2 +-
 parm/config/gfs/config.aeroanlrun | 2 +-
 parm/config/gfs/config.atmanl | 3 +
 parm/config/gfs/config.atmensanl | 3 +
 parm/config/gfs/config.snowanl | 2 +
 parm/gdas/aero_crtm_coeff.yaml | 13 --
 parm/gdas/aero_crtm_coeff.yaml.j2 | 13 ++
 parm/gdas/aero_jedi_fix.yaml | 11 --
 ...tm_jedi_fix.yaml => aero_jedi_fix.yaml.j2} | 0
 ...crtm_coeff.yaml => atm_crtm_coeff.yaml.j2} | 0
 parm/gdas/atm_jedi_fix.yaml.j2 | 7 +
 parm/gdas/snow_jedi_fix.yaml.j2 | 8 +-
 sorc/gdas.cd | 2 +-
 ush/forecast_postdet.sh | 4 +-
 ush/forecast_predet.sh | 52 +++----
 ush/python/pygfs/task/aero_analysis.py | 29 ++--
 ush/python/pygfs/task/analysis.py | 137 +++++++++++++++---
 ush/python/pygfs/task/atm_analysis.py | 27 ++--
 ush/python/pygfs/task/atmens_analysis.py | 27 ++--
 ush/python/pygfs/task/snow_analysis.py | 12 +-
 25 files changed, 235 insertions(+), 145 deletions(-)
 rename ci/cases/pr/{C96_atmsnowDA.yaml => C96_atmaerosnowDA.yaml} (81%)
 rename ci/cases/yamls/{atmsnowDA_defaults_ci.yaml => atmaerosnowDA_defaults_ci.yaml} (100%)
 delete mode 100644 parm/gdas/aero_crtm_coeff.yaml
 create mode 100644 parm/gdas/aero_crtm_coeff.yaml.j2
 delete mode 100644 parm/gdas/aero_jedi_fix.yaml
 rename parm/gdas/{atm_jedi_fix.yaml => aero_jedi_fix.yaml.j2} (100%)
 rename parm/gdas/{atm_crtm_coeff.yaml => atm_crtm_coeff.yaml.j2} (100%)
 create mode 100644 parm/gdas/atm_jedi_fix.yaml.j2

diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile
index bda7b5c694..6f27804a3c 100644
--- a/ci/Jenkinsfile
+++ b/ci/Jenkinsfile
@@ -119,7 +119,7 @@ pipeline {
         axis {
             name 'Case'
             // TODO add dynamic list of cases from env vars (needs addtional plugins)
-            values 'C48C48_ufs_hybatmDA', 'C48_ATM', 'C48_S2SW', 'C48_S2SWA_gefs', 'C48mx500_3DVarAOWCDA', 'C96C48_hybatmDA', 'C96_atm3DVar', 'C96_atmsnowDA'
+            values 'C48C48_ufs_hybatmDA', 'C48_ATM', 'C48_S2SW', 'C48_S2SWA_gefs', 'C48mx500_3DVarAOWCDA', 'C96C48_hybatmDA', 'C96_atm3DVar', 'C96_atmaerosnowDA'
         }
     }
     stages {
@@ -166,7 +166,7 @@ pipeline {
             for (line in lines) {
                 echo "archiving: ${line}"
                 archiveArtifacts artifacts: "${line}", fingerprint: true
-            }
+            }
         }
     }
     error("Failed to run experiments ${Case} on ${Machine}")

diff --git a/ci/cases/pr/C96_atmsnowDA.yaml b/ci/cases/pr/C96_atmaerosnowDA.yaml
similarity index 81%
rename from ci/cases/pr/C96_atmsnowDA.yaml
rename to ci/cases/pr/C96_atmaerosnowDA.yaml
index 35fcc10fb2..7e22955a37 100644
--- a/ci/cases/pr/C96_atmsnowDA.yaml
+++ 
b/ci/cases/pr/C96_atmaerosnowDA.yaml @@ -4,7 +4,7 @@ experiment: arguments: pslot: {{ 'pslot' | getenv }} - app: ATM + app: ATMA resdetatmos: 96 comroot: {{ 'RUNTESTS' | getenv }}/COMROOT expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR @@ -14,7 +14,7 @@ arguments: nens: 0 gfs_cyc: 1 start: cold - yaml: {{ HOMEgfs }}/ci/cases/yamls/atmsnowDA_defaults_ci.yaml + yaml: {{ HOMEgfs }}/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml skip_ci_on_hosts: - orion diff --git a/ci/cases/yamls/atmsnowDA_defaults_ci.yaml b/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml similarity index 100% rename from ci/cases/yamls/atmsnowDA_defaults_ci.yaml rename to ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml diff --git a/parm/config/gfs/config.aeroanl b/parm/config/gfs/config.aeroanl index cf7981f807..972f393feb 100644 --- a/parm/config/gfs/config.aeroanl +++ b/parm/config/gfs/config.aeroanl @@ -6,24 +6,26 @@ echo "BEGIN: config.aeroanl" export CASE_ANL=${CASE} -export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ -export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export OBS_LIST="${PARMgfs}/gdas/aero/obs/lists/gdas_aero.yaml.j2" export STATICB_TYPE='identity' -export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml -export BERROR_DATA_DIR=${FIXgfs}/gdas/bump/aero/${CASE_ANL}/ +export BERROR_YAML="${PARMgfs}/gdas/aero/berror/staticb_${STATICB_TYPE}.yaml.j2" +export BERROR_DATA_DIR="${FIXgfs}/gdas/bump/aero/${CASE_ANL}/" export BERROR_DATE="20160630.000000" +export CRTM_FIX_YAML="${PARMgfs}/gdas/aero_crtm_coeff.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/aero_jedi_fix.yaml.j2" + export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ -export JEDIEXE=${EXECgfs}/fv3jedi_var.x +export JEDIEXE="${EXECgfs}/fv3jedi_var.x" if [[ "${DOIAU}" == "YES" ]]; then export aero_bkg_times="3,6,9" - export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_fgat_gfs_aero.yaml + export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_fgat_gfs_aero.yaml.j2" else export aero_bkg_times="6" - export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml + export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_gfs_aero.yaml.j2" fi echo "END: config.aeroanl" diff --git a/parm/config/gfs/config.aeroanlfinal b/parm/config/gfs/config.aeroanlfinal index 230ec5205a..34e5d8f116 100644 --- a/parm/config/gfs/config.aeroanlfinal +++ b/parm/config/gfs/config.aeroanlfinal @@ -6,5 +6,5 @@ echo "BEGIN: config.aeroanlfinal" # Get task specific resources -. $EXPDIR/config.resources aeroanlfinal +source "${EXPDIR}/config.resources" aeroanlfinal echo "END: config.aeroanlfinal" diff --git a/parm/config/gfs/config.aeroanlinit b/parm/config/gfs/config.aeroanlinit index 72175b8d0c..7036d3d27b 100644 --- a/parm/config/gfs/config.aeroanlinit +++ b/parm/config/gfs/config.aeroanlinit @@ -6,5 +6,5 @@ echo "BEGIN: config.aeroanlinit" # Get task specific resources -. $EXPDIR/config.resources aeroanlinit +source "${EXPDIR}/config.resources" aeroanlinit echo "END: config.aeroanlinit" diff --git a/parm/config/gfs/config.aeroanlrun b/parm/config/gfs/config.aeroanlrun index da13df2831..012e5b79f3 100644 --- a/parm/config/gfs/config.aeroanlrun +++ b/parm/config/gfs/config.aeroanlrun @@ -6,6 +6,6 @@ echo "BEGIN: config.aeroanlrun" # Get task specific resources -. 
$EXPDIR/config.resources aeroanlrun +source "${EXPDIR}/config.resources" aeroanlrun echo "END: config.aeroanlrun" diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl index 11358de8a8..7cfd0cb47f 100644 --- a/parm/config/gfs/config.atmanl +++ b/parm/config/gfs/config.atmanl @@ -19,6 +19,9 @@ else export BERROR_YAML="${PARMgfs}/gdas/atm/berror/staticb_${STATICB_TYPE}.yaml.j2" fi +export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" + export layout_x_atmanl=@LAYOUT_X_ATMANL@ export layout_y_atmanl=@LAYOUT_Y_ATMANL@ diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl index 49b903e4c0..8e824b22f6 100644 --- a/parm/config/gfs/config.atmensanl +++ b/parm/config/gfs/config.atmensanl @@ -9,6 +9,9 @@ export OBS_LIST="${PARMgfs}/gdas/atm/obs/lists/lgetkf_prototype.yaml.j2" export JEDIYAML="${PARMgfs}/gdas/atm/lgetkf/lgetkf.yaml.j2" export INTERP_METHOD='barycentric' +export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" + export layout_x_atmensanl=@LAYOUT_X_ATMENSANL@ export layout_y_atmensanl=@LAYOUT_Y_ATMENSANL@ diff --git a/parm/config/gfs/config.snowanl b/parm/config/gfs/config.snowanl index 30e6d9c07b..7b3ffa47f3 100644 --- a/parm/config/gfs/config.snowanl +++ b/parm/config/gfs/config.snowanl @@ -22,6 +22,8 @@ export BESTDDEV="30." # Background Error Std. Dev. for LETKFOI export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe" export APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/apply_incr_nml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/snow_jedi_fix.yaml.j2" + export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ diff --git a/parm/gdas/aero_crtm_coeff.yaml b/parm/gdas/aero_crtm_coeff.yaml deleted file mode 100644 index 75b54c3741..0000000000 --- a/parm/gdas/aero_crtm_coeff.yaml +++ /dev/null @@ -1,13 +0,0 @@ -mkdir: -- {{ DATA }}/crtm/ -copy: -- [{{ CRTM_FIX }}/AerosolCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/CloudCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/v.viirs-m_npp.SpcCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/v.viirs-m_npp.TauCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/v.viirs-m_j1.SpcCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/v.viirs-m_j1.TauCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/NPOESS.VISice.EmisCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/NPOESS.VISland.EmisCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/NPOESS.VISsnow.EmisCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/NPOESS.VISwater.EmisCoeff.bin, {{ DATA }}/crtm/] diff --git a/parm/gdas/aero_crtm_coeff.yaml.j2 b/parm/gdas/aero_crtm_coeff.yaml.j2 new file mode 100644 index 0000000000..b48d8ff231 --- /dev/null +++ b/parm/gdas/aero_crtm_coeff.yaml.j2 @@ -0,0 +1,13 @@ +mkdir: +- '{{ DATA }}/crtm/' +copy: +- ['{{ CRTM_FIX }}/AerosolCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/CloudCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/v.viirs-m_npp.SpcCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/v.viirs-m_npp.TauCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/v.viirs-m_j1.SpcCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/v.viirs-m_j1.TauCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/NPOESS.VISice.EmisCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/NPOESS.VISland.EmisCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/NPOESS.VISsnow.EmisCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/NPOESS.VISwater.EmisCoeff.bin', '{{ DATA }}/crtm/'] diff --git 
a/parm/gdas/aero_jedi_fix.yaml b/parm/gdas/aero_jedi_fix.yaml deleted file mode 100644 index 16cbeac6e7..0000000000 --- a/parm/gdas/aero_jedi_fix.yaml +++ /dev/null @@ -1,11 +0,0 @@ -mkdir: -- !ENV ${DATA}/fv3jedi -copy: -- - !ENV ${FIXgfs}/gdas/fv3jedi/fv3files/akbk$(npz).nc4 - - !ENV ${DATA}/fv3jedi/akbk.nc4 -- - !ENV ${FIXgfs}/gdas/fv3jedi/fv3files/fmsmpp.nml - - !ENV ${DATA}/fv3jedi/fmsmpp.nml -- - !ENV ${FIXgfs}/gdas/fv3jedi/fv3files/field_table_gfdl - - !ENV ${DATA}/fv3jedi/field_table -- - !ENV $(HOMEgfs)/sorc/gdas.cd/parm/io/fv3jedi_fieldmetadata_restart.yaml - - !ENV ${DATA}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml diff --git a/parm/gdas/atm_jedi_fix.yaml b/parm/gdas/aero_jedi_fix.yaml.j2 similarity index 100% rename from parm/gdas/atm_jedi_fix.yaml rename to parm/gdas/aero_jedi_fix.yaml.j2 diff --git a/parm/gdas/atm_crtm_coeff.yaml b/parm/gdas/atm_crtm_coeff.yaml.j2 similarity index 100% rename from parm/gdas/atm_crtm_coeff.yaml rename to parm/gdas/atm_crtm_coeff.yaml.j2 diff --git a/parm/gdas/atm_jedi_fix.yaml.j2 b/parm/gdas/atm_jedi_fix.yaml.j2 new file mode 100644 index 0000000000..69039baddf --- /dev/null +++ b/parm/gdas/atm_jedi_fix.yaml.j2 @@ -0,0 +1,7 @@ +mkdir: +- '{{ DATA }}/fv3jedi' +copy: +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/akbk{{ npz }}.nc4', '{{ DATA }}/fv3jedi/akbk.nc4'] +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/fmsmpp.nml', '{{ DATA }}/fv3jedi/fmsmpp.nml'] +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/field_table_gfdl', '{{ DATA }}/fv3jedi/field_table'] +- ['{{ PARMgfs }}/gdas/io/fv3jedi_fieldmetadata_restart.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml'] diff --git a/parm/gdas/snow_jedi_fix.yaml.j2 b/parm/gdas/snow_jedi_fix.yaml.j2 index 4d820a82ba..69039baddf 100644 --- a/parm/gdas/snow_jedi_fix.yaml.j2 +++ b/parm/gdas/snow_jedi_fix.yaml.j2 @@ -1,7 +1,7 @@ mkdir: - '{{ DATA }}/fv3jedi' copy: -- ['{{ HOMEgfs }}/fix/gdas/fv3jedi/fv3files/akbk{{ npz }}.nc4', '{{ DATA }}/fv3jedi/akbk.nc4'] -- ['{{ HOMEgfs }}/fix/gdas/fv3jedi/fv3files/fmsmpp.nml', '{{ DATA }}/fv3jedi/fmsmpp.nml'] -- ['{{ HOMEgfs }}/fix/gdas/fv3jedi/fv3files/field_table_gfdl', '{{ DATA }}/fv3jedi/field_table'] -- ['{{ HOMEgfs }}/sorc/gdas.cd/parm/io/fv3jedi_fieldmetadata_restart.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml'] +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/akbk{{ npz }}.nc4', '{{ DATA }}/fv3jedi/akbk.nc4'] +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/fmsmpp.nml', '{{ DATA }}/fv3jedi/fmsmpp.nml'] +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/field_table_gfdl', '{{ DATA }}/fv3jedi/field_table'] +- ['{{ PARMgfs }}/gdas/io/fv3jedi_fieldmetadata_restart.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml'] diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 8a6f825f6d..dd350d7e4d 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 8a6f825f6d988c81fad11070de92a2744d5a53cc +Subproject commit dd350d7e4daab0977407e388711807f13b204f6f diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index a5fee9a7fd..1e244ab433 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -931,7 +931,7 @@ GOCART_rc() { GOCART_postdet() { echo "SUB ${FUNCNAME[0]}: Linking output data for GOCART" - for fhr in ${FV3_OUTPUT_FH}; do + for fhr in ${GOCART_OUTPUT_FH}; do local vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) # Temporarily delete existing files due to noclobber in GOCART @@ -952,7 +952,7 @@ GOCART_out() { # TO DO: this should be linked but there were issues where gocart was crashing if it was linked 
local fhr local vdate - for fhr in ${FV3_OUTPUT_FH}; do + for fhr in ${GOCART_OUTPUT_FH}; do if (( fhr == 0 )); then continue; fi vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) ${NCP} "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \ diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index ab02270b46..b5e1ad8e82 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -10,30 +10,33 @@ to_seconds() { # Function to convert HHMMSS to seconds since 00Z - local hhmmss=${1:?} - local hh=${hhmmss:0:2} - local mm=${hhmmss:2:2} - local ss=${hhmmss:4:2} - local seconds=$((10#${hh}*3600+10#${mm}*60+10#${ss})) - local padded_seconds=$(printf "%05d" "${seconds}") + local hhmmss hh mm ss seconds padded_seconds + hhmmss=${1:?} + hh=${hhmmss:0:2} + mm=${hhmmss:2:2} + ss=${hhmmss:4:2} + seconds=$((10#${hh}*3600+10#${mm}*60+10#${ss})) + padded_seconds=$(printf "%05d" "${seconds}") echo "${padded_seconds}" } middle_date(){ # Function to calculate mid-point date in YYYYMMDDHH between two dates also in YYYYMMDDHH - local date1=${1:?} - local date2=${2:?} - local date1s=$(date --utc -d "${date1:0:8} ${date1:8:2}:00:00" +%s) - local date2s=$(date --utc -d "${date2:0:8} ${date2:8:2}:00:00" +%s) - local dtsecsby2=$(( $((date2s - date1s)) / 2 )) - local mid_date=$(date --utc -d "${date1:0:8} ${date1:8:2} + ${dtsecsby2} seconds" +%Y%m%d%H%M%S) + local date1 date2 date1s date2s dtsecsby2 mid_date + date1=${1:?} + date2=${2:?} + date1s=$(date --utc -d "${date1:0:8} ${date1:8:2}:00:00" +%s) + date2s=$(date --utc -d "${date2:0:8} ${date2:8:2}:00:00" +%s) + dtsecsby2=$(( $((date2s - date1s)) / 2 )) + mid_date=$(date --utc -d "${date1:0:8} ${date1:8:2} + ${dtsecsby2} seconds" +%Y%m%d%H%M%S) echo "${mid_date:0:10}" } nhour(){ # Function to calculate hours between two dates (This replicates prod-util NHOUR) - local date1=${1:?} - local date2=${2:?} + local date1 date2 seconds1 seconds2 hours + date1=${1:?} + date2=${2:?} # Convert dates to UNIX timestamps seconds1=$(date --utc -d "${date1:0:8} ${date1:8:2}:00:00" +%s) seconds2=$(date --utc -d "${date2:0:8} ${date2:8:2}:00:00" +%s) @@ -41,21 +44,17 @@ nhour(){ echo "${hours}" } +# shellcheck disable=SC2034 common_predet(){ echo "SUB ${FUNCNAME[0]}: Defining variables for shared through model components" - # Ignore "not used" warning - # shellcheck disable=SC2034 pwd=$(pwd) CDUMP=${CDUMP:-gdas} - CASE=${CASE:-C96} CDATE=${CDATE:-"${PDY}${cyc}"} ENSMEM=${ENSMEM:-000} # Define significant cycles current_cycle="${PDY}${cyc}" previous_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - ${assim_freq} hours" +%Y%m%d%H) - # ignore errors that variable isn't used - # shellcheck disable=SC2034 next_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${assim_freq} hours" +%Y%m%d%H) forecast_end_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${FHMAX} hours" +%Y%m%d%H) @@ -89,6 +88,7 @@ common_predet(){ cd "${DATA}" || ( echo "FATAL ERROR: Unable to 'cd ${DATA}', ABORT!"; exit 8 ) } +# shellcheck disable=SC2034 FV3_predet(){ echo "SUB ${FUNCNAME[0]}: Defining variables for FV3" @@ -105,8 +105,6 @@ FV3_predet(){ fi # Convert output settings into an explicit list for FV3 - # Ignore "not used" warning - # shellcheck disable=SC2034 FV3_OUTPUT_FH="" local fhr=${FHMIN} if (( FHOUT_HF > 0 && FHMAX_HF > 0 )); then @@ -116,8 +114,6 @@ FV3_predet(){ FV3_OUTPUT_FH="${FV3_OUTPUT_FH} $(seq -s ' ' "${fhr}" "${FHOUT}" "${FHMAX}")" # Other options - # ignore errors that variable 
isn't used - # shellcheck disable=SC2034 MEMBER=$(( 10#${ENSMEM:-"-1"} )) # -1: control, 0: ensemble mean, >0: ensemble member $MEMBER PREFIX_ATMINC=${PREFIX_ATMINC:-""} # allow ensemble to use recentered increment @@ -169,7 +165,6 @@ FV3_predet(){ nstf_name=${nstf_name:-"${NST_MODEL},${NST_SPINUP},${NST_RESV},${ZSEA1},${ZSEA2}"} nst_anl=${nst_anl:-".false."} - # blocking factor used for threading and general physics performance #nyblocks=$(expr \( $npy - 1 \) \/ $layout_y ) #nxblocks=$(expr \( $npx - 1 \) \/ $layout_x \/ 32) @@ -215,6 +210,7 @@ WW3_predet(){ ${NLN} "${COM_WAVE_RESTART}" "restart_wave" } +# shellcheck disable=SC2034 CICE_predet(){ echo "SUB ${FUNCNAME[0]}: CICE before run type determination" @@ -227,12 +223,11 @@ CICE_predet(){ # CICE does not have a concept of high frequency output like FV3 # Convert output settings into an explicit list for CICE - # Ignore "not used" warning - # shellcheck disable=SC2034 CICE_OUTPUT_FH=$(seq -s ' ' "${FHMIN}" "${FHOUT_OCNICE}" "${FHMAX}") } +# shellcheck disable=SC2034 MOM6_predet(){ echo "SUB ${FUNCNAME[0]}: MOM6 before run type determination" @@ -245,8 +240,6 @@ MOM6_predet(){ # MOM6 does not have a concept of high frequency output like FV3 # Convert output settings into an explicit list for MOM6 - # Ignore "not used" warning - # shellcheck disable=SC2034 MOM6_OUTPUT_FH=$(seq -s ' ' "${FHMIN}" "${FHOUT_OCNICE}" "${FHMAX}") } @@ -260,9 +253,12 @@ CMEPS_predet(){ } +# shellcheck disable=SC2034 GOCART_predet(){ echo "SUB ${FUNCNAME[0]}: GOCART before run type determination" if [[ ! -d "${COM_CHEM_HISTORY}" ]]; then mkdir -p "${COM_CHEM_HISTORY}"; fi + GOCART_OUTPUT_FH=$(seq -s ' ' "${FHMIN}" "6" "${FHMAX}") + # TODO: AERO_HISTORY.rc has hardwired output frequency to 6 hours } diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py index 0e515a0df4..a61b7c82f3 100644 --- a/ush/python/pygfs/task/aero_analysis.py +++ b/ush/python/pygfs/task/aero_analysis.py @@ -12,7 +12,7 @@ add_to_datetime, to_fv3time, to_timedelta, chdir, to_fv3time, - YAMLFile, parse_yamltmpl, parse_j2yaml, save_as_yaml, + YAMLFile, parse_j2yaml, save_as_yaml, logit, Executable, WorkflowException) @@ -32,7 +32,7 @@ def __init__(self, config): _res = int(self.config['CASE'][1:]) _res_anl = int(self.config['CASE_ANL'][1:]) _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2) - _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml") + _jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( @@ -46,11 +46,11 @@ def __init__(self, config): 'npz_anl': self.config['LEVS'] - 1, 'AERO_WINDOW_BEGIN': _window_begin, 'AERO_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H", - 'aero_bkg_fhr': map(int, self.config['aero_bkg_times'].split(',')), + 'aero_bkg_fhr': map(int, str(self.config['aero_bkg_times']).split(',')), 'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", - 'fv3jedi_yaml': _fv3jedi_yaml, + 'jedi_yaml': _jedi_yaml, } ) @@ -73,15 +73,13 @@ def initialize(self: Analysis) -> None: 
super().initialize() # stage CRTM fix files - crtm_fix_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'gdas', 'aero_crtm_coeff.yaml') - logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") - crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config) + logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") + crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) FileHandler(crtm_fix_list).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'gdas', 'aero_jedi_fix.yaml') - logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") - jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() # stage berror files @@ -93,10 +91,9 @@ def initialize(self: Analysis) -> None: FileHandler(self.get_bkg_dict(AttrDict(self.task_config, **self.task_config))).sync() # generate variational YAML file - logger.debug(f"Generate variational YAML file: {self.task_config.fv3jedi_yaml}") - varda_yaml = parse_j2yaml(self.task_config['AEROVARYAML'], self.task_config) - save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml) - logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}") + logger.debug(f"Generate variational YAML file: {self.task_config.jedi_yaml}") + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) + logger.info(f"Wrote variational YAML to: {self.task_config.jedi_yaml}") # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") @@ -114,7 +111,7 @@ def execute(self: Analysis) -> None: exec_cmd = Executable(self.task_config.APRUN_AEROANL) exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x') exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + exec_cmd.add_default_arg(self.task_config.jedi_yaml) try: logger.debug(f"Executing {exec_cmd}") @@ -212,7 +209,7 @@ def _add_fms_cube_sphere_increments(self: Analysis) -> None: inc_template = os.path.join(self.task_config.DATA, 'anl', 'aeroinc.' 
+ increment_template) bkg_template = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, restart_template) # get list of increment vars - incvars_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'gdas', 'aeroanl_inc_vars.yaml') + incvars_list_path = os.path.join(self.task_config['PARMgfs'], 'gdas', 'aeroanl_inc_vars.yaml') incvars = YAMLFile(path=incvars_list_path)['incvars'] super().add_fv3_increments(inc_template, bkg_template, incvars) diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 5709bc130e..2221fb7b34 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -4,6 +4,7 @@ import glob import tarfile from logging import getLogger +from pprint import pformat from netCDF4 import Dataset from typing import List, Dict, Any, Union @@ -26,10 +27,14 @@ def __init__(self, config: Dict[str, Any]) -> None: super().__init__(config) self.config.ntiles = 6 # Store location of GDASApp jinja2 templates - self.gdasapp_j2tmpl_dir = os.path.join(self.config.HOMEgfs, 'parm/gdas') + self.gdasapp_j2tmpl_dir = os.path.join(self.config.PARMgfs, 'gdas') def initialize(self) -> None: super().initialize() + + # all JEDI analyses need a JEDI config + self.task_config.jedi_config = self.get_jedi_config() + # all analyses need to stage observations obs_dict = self.get_obs_dict() FileHandler(obs_dict).sync() @@ -41,13 +46,33 @@ def initialize(self) -> None: # link jedi executable to run directory self.link_jediexe() + @logit(logger) + def get_jedi_config(self) -> Dict[str, Any]: + """Compile a dictionary of JEDI configuration from JEDIYAML template file + + Parameters + ---------- + + Returns + ---------- + jedi_config : Dict + a dictionary containing the fully rendered JEDI yaml configuration + """ + + # generate JEDI YAML file + logger.info(f"Generate JEDI YAML config: {self.task_config.jedi_yaml}") + jedi_config = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, searchpath=self.gdasapp_j2tmpl_dir) + logger.debug(f"JEDI config:\n{pformat(jedi_config)}") + + return jedi_config + @logit(logger) def get_obs_dict(self) -> Dict[str, Any]: """Compile a dictionary of observation files to copy - This method uses the OBS_LIST configuration variable to generate a dictionary - from a list of YAML files that specify what observation files are to be - copied to the run directory from the observation input directory + This method extracts 'observers' from the JEDI yaml and from that list, extracts a list of + observation files that are to be copied to the run directory + from the observation input directory Parameters ---------- @@ -57,13 +82,13 @@ def get_obs_dict(self) -> Dict[str, Any]: obs_dict: Dict a dictionary containing the list of observation files to copy for FileHandler """ - logger.debug(f"OBS_LIST: {self.task_config['OBS_LIST']}") - obs_list_config = parse_j2yaml(self.task_config["OBS_LIST"], self.task_config, searchpath=self.gdasapp_j2tmpl_dir) - logger.debug(f"obs_list_config: {obs_list_config}") - # get observers from master dictionary - observers = obs_list_config['observers'] + + logger.info(f"Extracting a list of observation files from {self.task_config.JEDIYAML}") + observations = find_value_in_nested_dict(self.task_config.jedi_config, 'observations') + logger.debug(f"observations:\n{pformat(observations)}") + copylist = [] - for ob in observers: + for ob in observations['observers']: obfile = ob['obs space']['obsdatain']['engine']['obsfile'] basename = os.path.basename(obfile) 
copylist.append([os.path.join(self.task_config['COM_OBS'], basename), obfile]) @@ -77,9 +102,11 @@ def get_obs_dict(self) -> Dict[str, Any]: def get_bias_dict(self) -> Dict[str, Any]: """Compile a dictionary of observation files to copy - This method uses the OBS_LIST configuration variable to generate a dictionary - from a list of YAML files that specify what observation bias correction files - are to be copied to the run directory from the observation input directory + This method extracts 'observers' from the JEDI yaml and from that list, extracts a list of + observation bias correction files that are to be copied to the run directory + from the component directory. + TODO: COM_ATMOS_ANALYSIS_PREV is hardwired here and this method is not appropriate in + `analysis.py` and should be implemented in the component where this is applicable. Parameters ---------- @@ -89,13 +116,13 @@ def get_bias_dict(self) -> Dict[str, Any]: bias_dict: Dict a dictionary containing the list of observation bias files to copy for FileHandler """ - logger.debug(f"OBS_LIST: {self.task_config['OBS_LIST']}") - obs_list_config = parse_j2yaml(self.task_config["OBS_LIST"], self.task_config, searchpath=self.gdasapp_j2tmpl_dir) - logger.debug(f"obs_list_config: {obs_list_config}") - # get observers from master dictionary - observers = obs_list_config['observers'] + + logger.info(f"Extracting a list of bias correction files from {self.task_config.JEDIYAML}") + observations = find_value_in_nested_dict(self.task_config.jedi_config, 'observations') + logger.debug(f"observations:\n{pformat(observations)}") + copylist = [] - for ob in observers: + for ob in observations['observers']: if 'obs bias' in ob.keys(): obfile = ob['obs bias']['input file'] obdir = os.path.dirname(obfile) @@ -104,6 +131,7 @@ def get_bias_dict(self) -> Dict[str, Any]: for file in ['satbias.nc', 'satbias_cov.nc', 'tlapse.txt']: bfile = f"{prefix}.{file}" copylist.append([os.path.join(self.task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)]) + # TODO: Why is this specific to ATMOS? bias_dict = { 'mkdir': [os.path.join(self.runtime_config.DATA, 'bc')], @@ -328,3 +356,74 @@ def tgz_diags(statfile: str, diagdir: str) -> None: # Add diag files to tarball for diagfile in diags: tgz.add(diagfile, arcname=os.path.basename(diagfile)) + + +@logit(logger) +def find_value_in_nested_dict(nested_dict: Dict, target_key: str) -> Any: + """ + Recursively search through a nested dictionary and return the value for the target key. + This returns the first target key it finds. So if a key exists in a subsequent + nested dictionary, it will not be found. + + Parameters + ---------- + nested_dict : Dict + Dictionary to search + target_key : str + Key to search for + + Returns + ------- + Any + Value of the target key + + Raises + ------ + KeyError + If key is not found in dictionary + + TODO: if this gives issues due to landing on an incorrect key in the nested + dictionary, we will have to implement a more concrete method to search for a key + given a more complete address. 
See resolved conversations in PR 2387 + + # Example usage: + nested_dict = { + 'a': { + 'b': { + 'c': 1, + 'd': { + 'e': 2, + 'f': 3 + } + }, + 'g': 4 + }, + 'h': { + 'i': 5 + }, + 'j': { + 'k': 6 + } + } + + user_key = input("Enter the key to search for: ") + result = find_value_in_nested_dict(nested_dict, user_key) + """ + + if not isinstance(nested_dict, dict): + raise TypeError(f"Input is not of type(dict)") + + result = nested_dict.get(target_key) + if result is not None: + return result + + for value in nested_dict.values(): + if isinstance(value, dict): + try: + result = find_value_in_nested_dict(value, target_key) + if result is not None: + return result + except KeyError: + pass + + raise KeyError(f"Key '{target_key}' not found in the nested dictionary") diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 5a90a89e34..6348bdf319 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -31,7 +31,7 @@ def __init__(self, config): _res = int(self.config.CASE[1:]) _res_anl = int(self.config.CASE_ANL[1:]) _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2) - _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmvar.yaml") + _jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmvar.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( @@ -48,7 +48,7 @@ def __init__(self, config): 'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", - 'fv3jedi_yaml': _fv3jedi_yaml, + 'jedi_yaml': _jedi_yaml, } ) @@ -71,19 +71,17 @@ def initialize(self: Analysis) -> None: super().initialize() # stage CRTM fix files - crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_crtm_coeff.yaml') - logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") - crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config) + logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") + crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) FileHandler(crtm_fix_list).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_jedi_fix.yaml') - logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") - jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() # stage static background error files, otherwise it will assume ID matrix - logger.debug(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}") + logger.info(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}") FileHandler(self.get_berror_dict(self.task_config)).sync() # stage ensemble files for use in hybrid background error @@ -102,10 +100,9 @@ def initialize(self: Analysis) -> None: FileHandler(self.get_bkg_dict(AttrDict(self.task_config))).sync() # generate variational YAML file - logger.debug(f"Generate variational YAML file: 
{self.task_config.fv3jedi_yaml}") - varda_yaml = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, searchpath=self.gdasapp_j2tmpl_dir) - save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml) - logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}") + logger.debug(f"Generate variational YAML file: {self.task_config.jedi_yaml}") + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) + logger.info(f"Wrote variational YAML to: {self.task_config.jedi_yaml}") # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") @@ -123,7 +120,7 @@ def execute(self: Analysis) -> None: exec_cmd = Executable(self.task_config.APRUN_ATMANL) exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x') exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + exec_cmd.add_default_arg(self.task_config.jedi_yaml) try: logger.debug(f"Executing {exec_cmd}") @@ -170,7 +167,7 @@ def finalize(self: Analysis) -> None: archive.add(diaggzip, arcname=os.path.basename(diaggzip)) # copy full YAML from executable to ROTDIR - logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS}") + logger.info(f"Copying {self.task_config.jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS}") src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml") dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml") logger.debug(f"Copying {src} to {dest}") diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 3e2c0a233c..1037b557c2 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -31,7 +31,7 @@ def __init__(self, config): _res = int(self.config.CASE_ENS[1:]) _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2) - _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmens.yaml") + _jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmens.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( @@ -45,7 +45,7 @@ def __init__(self, config): 'OPREFIX': f"{self.config.EUPD_CYC}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", - 'fv3jedi_yaml': _fv3jedi_yaml, + 'jedi_yaml': _jedi_yaml, } ) @@ -96,19 +96,17 @@ def initialize(self: Analysis) -> None: FileHandler({'mkdir': dirlist}).sync() # stage CRTM fix files - crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_crtm_coeff.yaml') - logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") - crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config) + logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") + crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) FileHandler(crtm_fix_list).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_jedi_fix.yaml') - logger.debug(f"Staging JEDI fix files from 
{jedi_fix_list_path}") - jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() # stage backgrounds - logger.debug(f"Stage ensemble member background files") + logger.info(f"Stage ensemble member background files") localconf = AttrDict() keys = ['COM_ATMOS_RESTART_TMPL', 'previous_cycle', 'ROTDIR', 'RUN', 'NMEM_ENS', 'DATA', 'current_cycle', 'ntiles'] @@ -118,10 +116,9 @@ def initialize(self: Analysis) -> None: FileHandler(self.get_fv3ens_dict(localconf)).sync() # generate ensemble da YAML file - logger.debug(f"Generate ensemble da YAML file: {self.task_config.fv3jedi_yaml}") - ensda_yaml = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, searchpath=self.gdasapp_j2tmpl_dir) - save_as_yaml(ensda_yaml, self.task_config.fv3jedi_yaml) - logger.info(f"Wrote ensemble da YAML to: {self.task_config.fv3jedi_yaml}") + logger.debug(f"Generate ensemble da YAML file: {self.task_config.jedi_yaml}") + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) + logger.info(f"Wrote ensemble da YAML to: {self.task_config.jedi_yaml}") # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") @@ -153,7 +150,7 @@ def execute(self: Analysis) -> None: exec_cmd = Executable(self.task_config.APRUN_ATMENSANL) exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_letkf.x') exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + exec_cmd.add_default_arg(self.task_config.jedi_yaml) try: logger.debug(f"Executing {exec_cmd}") @@ -206,7 +203,7 @@ def finalize(self: Analysis) -> None: archive.add(diaggzip, arcname=os.path.basename(diaggzip)) # copy full YAML from executable to ROTDIR - logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS_ENS}") + logger.info(f"Copying {self.task_config.jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS_ENS}") src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml") dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml") logger.debug(f"Copying {src} to {dest}") diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index 9eee8314c3..c149f140b6 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -260,20 +260,18 @@ def initialize(self) -> None: FileHandler({'mkdir': dirlist}).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'snow_jedi_fix.yaml.j2') - logger.info(f"Staging JEDI fix files from {jedi_fix_list_path}") - jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() # stage backgrounds logger.info("Staging ensemble backgrounds") FileHandler(self.get_ens_bkg_dict(localconf)).sync() - # generate letkfoi YAML file - logger.info(f"Generate JEDI LETKF YAML file: {self.task_config.jedi_yaml}") - letkfoi_yaml = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, searchpath=self.gdasapp_j2tmpl_dir) - save_as_yaml(letkfoi_yaml, 
self.task_config.jedi_yaml)
+        # Write out letkfoi YAML file
+        save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml)
         logger.info(f"Wrote letkfoi YAML to: {self.task_config.jedi_yaml}")
+
         # need output dir for diags and anl
         logger.info("Create empty output [anl, diags] directories to receive output from executable")
         newdirs = [
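
For reference, the lookup pattern used by the refactored `get_obs_dict`/`get_bias_dict` can be exercised on its own: render `JEDIYAML` once into `jedi_config`, locate the `observations` block, and build the copy list from each observer's `obsfile`. The sketch below is a minimal standalone illustration with a toy configuration and made-up paths (only the `observations` -> `observers` -> `obs space` nesting mirrors the real YAML; the surrounding keys are placeholders), and it uses a slightly simplified variant of the `find_value_in_nested_dict` helper added in `ush/python/pygfs/task/analysis.py`.

# Standalone sketch of the parse-once lookup pattern (toy data; paths are hypothetical).
import os
from typing import Any, Dict


def find_value_in_nested_dict(nested_dict: Dict, target_key: str) -> Any:
    """Return the value of the first occurrence of target_key in a nested dictionary
    (simplified variant of the helper added in ush/python/pygfs/task/analysis.py)."""
    if not isinstance(nested_dict, dict):
        raise TypeError("Input is not of type(dict)")
    if target_key in nested_dict:
        return nested_dict[target_key]
    for value in nested_dict.values():
        if isinstance(value, dict):
            try:
                return find_value_in_nested_dict(value, target_key)
            except KeyError:
                pass
    raise KeyError(f"Key '{target_key}' not found in the nested dictionary")


# Toy stand-in for the dictionary that get_jedi_config() renders once from JEDIYAML
jedi_config = {
    'cost function': {
        'observations': {
            'observers': [
                {'obs space': {'obsdatain': {'engine': {'obsfile': 'obs/viirs_aod.nc4'}}}},
                {'obs space': {'obsdatain': {'engine': {'obsfile': 'obs/sondes.nc4'}}}},
            ]
        }
    }
}

COM_OBS = '/path/to/COM_OBS'  # hypothetical observation input directory

observations = find_value_in_nested_dict(jedi_config, 'observations')
copylist = []
for ob in observations['observers']:
    obfile = ob['obs space']['obsdatain']['engine']['obsfile']
    copylist.append([os.path.join(COM_OBS, os.path.basename(obfile)), obfile])
print(copylist)  # e.g. [['/path/to/COM_OBS/viirs_aod.nc4', 'obs/viirs_aod.nc4'], ...]

Because the same rendered dictionary is also written out with `save_as_yaml` just before the executable is launched, the YAML handed to the JEDI executable matches what the staging steps saw.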