diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aero b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aero new file mode 100644 index 0000000000..32993554b4 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aero @@ -0,0 +1,46 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Turn off warnings about unused variables +# shellcheck disable=SC2034 + + +# Path to the input data tree +case ${machine} in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION" | "HERCULES") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine ${machine} unsupported for aerosols" + exit 2 + ;; +esac +export AERO_INPUTS_DIR + +export AERO_DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table.aero" +export AERO_FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table.aero" +# Biomass burning emission dataset. Choose from: gbbepx, qfed, none +export AERO_EMIS_FIRE="qfed" +# Directory containing GOCART configuration files +export AERO_CONFIG_DIR="${HOMEgfs}/parm/ufs/gocart" + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +export fscav_aero="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +export dnats_aero=2 diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanl b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanl new file mode 100644 index 0000000000..634d8c55b2 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanl @@ -0,0 +1,30 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FIXgdas=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FIXgdas}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x + +if [[ "${DOIAU}" == "YES" ]]; then + export aero_bkg_times="3,6,9" + export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_fgat_gfs_aero.yaml +else + export aero_bkg_times="6" + export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +fi + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlfinal b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post 
Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlinit b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlrun b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aerosol_init b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.anal b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.anal new file mode 100644 index 0000000000..e3a17f9c6a --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.anal @@ -0,0 +1,146 @@ +#! 
/usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate 
GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. 
+ # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assimilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. 
Thus, the logic below is not + # needed at this time + # + # Turn off assimilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.analcalc b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.analdiag b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.arch b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.arch new file mode 100644 index 0000000000..a23bcce6ae --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.arch @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} + +echo "END: config.arch" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanl b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanl new file mode 100644 index 0000000000..59ea7072a6 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanl @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlfinal b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlinit b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlrun b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanl b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanl new file mode 100644 index 0000000000..6b5da7699b --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanl @@ -0,0 +1,18 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlfinal b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlinit b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlrun b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmos_products b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmos_products new file mode 100644 index 0000000000..c3e861b281 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmos_products @@ -0,0 +1,35 @@ +#! /usr/bin/env bash + +########## config.atmos_products ########## +# atmosphere grib2 products specific + +echo "BEGIN: config.atmos_products" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmos_products + +# No. 
of forecast hours to process in a single job +export NFHRS_PER_GROUP=3 + +# Scripts used by this job +export INTERP_ATMOS_MASTERSH="${HOMEgfs}/ush/interp_atmos_master.sh" +export INTERP_ATMOS_SFLUXSH="${HOMEgfs}/ush/interp_atmos_sflux.sh" + +if [[ "${RUN:-}" == "gdas" ]]; then + export downset=1 + export FHOUT_PGBS=${FHOUT:-1} # Output frequency of supplemental gfs pgb file at 1.0 and 0.5 deg + export FLXGF="NO" # Create interpolated sflux.1p00 file +elif [[ "${RUN:-}" == "gfs" ]]; then + #JKHexport downset=2 ## create pgrb2b files + export downset=1 ## JKH + export FHOUT_PGBS=${FHOUT_GFS:-3} # Output frequency of supplemental gfs pgb file at 1.0 and 0.5 deg + export FLXGF="NO" # Create interpolated sflux.1p00 file +fi + +# paramlist files for the different forecast hours and downsets +export paramlista="${HOMEgfs}/parm/post/global_1x1_paramlist_g2" +export paramlista_anl="${HOMEgfs}/parm/post/global_1x1_paramlist_g2.anl" +export paramlista_f000="${HOMEgfs}/parm/post/global_1x1_paramlist_g2.f000" +export paramlistb="${HOMEgfs}/parm/post/global_master-catchup_parmlist_g2" + +echo "END: config.atmos_products" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.awips b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.awips new file mode 100644 index 0000000000..3b78d4bb4b --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. "${EXPDIR}/config.resources" awips + +export AWIPS20KM1P0DEGSH="${HOMEgfs}/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG" +export AWIPSG2SH="${HOMEgfs}/jobs/JGFS_ATMOS_AWIPS_G2" + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base new file mode 100644 index 0000000000..4468485d95 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base @@ -0,0 +1,406 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="HERA" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="hera" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +export HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1 +export PARMgfs="${HOMEgfs}/parm" +export FIXgfs="${HOMEgfs}/fix" +export USHgfs="${HOMEgfs}/ush" +export UTILgfs="${HOMEgfs}/util" +export EXECgfs="${HOMEgfs}/exec" +export SCRgfs="${HOMEgfs}/scripts" + +export FIXam="${FIXgfs}/am" +export FIXaer="${FIXgfs}/aer" +export FIXcpl="${FIXgfs}/cpl" +export FIXlut="${FIXgfs}/lut" +export FIXorog="${FIXgfs}/orog" +export FIXcice="${FIXgfs}/cice" +export FIXmom="${FIXgfs}/mom6" +export FIXreg2grb2="${FIXgfs}/reg2grb2" +export FIXugwd="${FIXgfs}/ugwd" + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/scratch1/NCEPDEV/global/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/scratch1/NCEPDEV/global/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/scratch1/NCEPDEV/global/glopara/dump" +export BASE_CPLIC="/scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs" + +# USER specific paths +export HOMEDIR="/scratch1/BMC/gsd-fv3/NCEPDEV/global/${USER}" +export 
STMP="/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1/FV3GFSrun/" +export PTMP="/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1/FV3GFSrun/" +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/scratch1/NCEPDEV/global/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_GOES="NO" # GOES products +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export DO_NPOESS="NO" # NPOESS products +export DO_TRACKER="YES" # Hurricane track verification +export DO_GENESIS="YES" # Cyclone genesis verification +export DO_GENESIS_FSU="NO" # Cyclone genesis verification (FSU) +export DO_VERFOZN="YES" # Ozone data assimilation monitoring +export DO_VERFRAD="YES" # Radiance data assimilation monitoring +export DO_VMINMON="YES" # GSI minimization monitoring +export DO_MOS="NO" # GFS Model Output Statistics - Only supported on WCOSS2 + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT:-}/obsproc/v${obsproc_run_ver:-1.1.2}" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump" +export NCLEN="${HOMEgfs}/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2024011400 +export EDATE=2024011400 +export EXP_WARM_START=".false." 
+export assim_freq=6 +export PSLOT="rt_v17p8_ugwpv1_c3_mynn" +export EXPDIR="/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSwfm/${PSLOT}" +export ROTDIR="/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/2year/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT:-}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change # TODO: Why is this needed and where is it used? + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") + export OCNRES=500 + export waveGRD='glo_500' + ;; + "C96") + export OCNRES=500 + export waveGRD='glo_200' + ;; + "C192") + export OCNRES=050 + export waveGRD='glo_200' + ;; + "C384") + export OCNRES=025 + export waveGRD='glo_025' + ;; + "C768" | "C1152") + export OCNRES=025 + export waveGRD='mx025' + ;; + *) + echo "FATAL ERROR: Unrecognized CASE ${CASE}, ABORT!" + exit 1 + ;; +esac +export ICERES=${OCNRES} + +case "${APP}" in + ATM) + ;; + ATMA) + export DO_AERO="YES" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + fi + ;; + *) + echo "Unrecognized APP: '${APP}'" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=120 +export FHMAX_GFS_06=120 +export FHMAX_GFS_12=120 +export FHMAX_GFS_18=120 +current_fhmax_var=FHMAX_GFS_${cyc}; declare -x FHMAX_GFS=${!current_fhmax_var} + +export FHOUT_GFS=6 # Must be 6 for S2S until #1629 is addressed; 3 for ops +export FHMAX_HF_GFS=0 +export FHOUT_HF_GFS=1 +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +#JKHexport restart_interval_gfs=12 +export restart_interval_gfs=-1 ## JKH +# NOTE: Do not set this to zero. Instead set it to $FHMAX_GFS +# TODO: Remove this variable from config.base and reference from config.fcst +# TODO: rework logic in config.wave and push it to parsing_nameslist_WW3.sh where it is actually used + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=${IAUFHRS%%,*} +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [[ ${l4densvar} = ".true." ]]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 + export IAUFHRS="6" +fi + +if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in DA job +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp; not supported with spack-stack +export DO_FIT2OBS="YES" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +echo "END: config.base" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base.emc.dyn_emc b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base.emc.dyn_emc new file mode 100644 index 0000000000..b726c1788a --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base.emc.dyn_emc @@ -0,0 +1,406 @@ +#! 
/usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +export HPSS_PROJECT="@HPSS_PROJECT@" + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs="${HOMEgfs}/parm" +export FIXgfs="${HOMEgfs}/fix" +export USHgfs="${HOMEgfs}/ush" +export UTILgfs="${HOMEgfs}/util" +export EXECgfs="${HOMEgfs}/exec" +export SCRgfs="${HOMEgfs}/scripts" + +export FIXam="${FIXgfs}/am" +export FIXaer="${FIXgfs}/aer" +export FIXcpl="${FIXgfs}/cpl" +export FIXlut="${FIXgfs}/lut" +export FIXorog="${FIXgfs}/orog" +export FIXcice="${FIXgfs}/cice" +export FIXmom="${FIXgfs}/mom6" +export FIXreg2grb2="${FIXgfs}/reg2grb2" +export FIXugwd="${FIXgfs}/ugwd" + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" +export BASE_CPLIC="@BASE_CPLIC@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_GOES="NO" # GOES products +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export DO_NPOESS="NO" # NPOESS products +export DO_TRACKER="YES" # Hurricane track verification +export DO_GENESIS="YES" # Cyclone genesis verification +export DO_GENESIS_FSU="NO" # Cyclone genesis verification (FSU) +export DO_VERFOZN="YES" # Ozone data assimilation monitoring +export DO_VERFRAD="YES" # Radiance data assimilation monitoring +export DO_VMINMON="YES" # GSI minimization monitoring +export DO_MOS="NO" # GFS Model Output Statistics - Only supported on WCOSS2 + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT:-}/obsproc/v${obsproc_run_ver:-1.1.2}" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump" +export NCLEN="${HOMEgfs}/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export 
EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT:-}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change # TODO: Why is this needed and where is it used? 
+ +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") + export OCNRES=500 + export waveGRD='glo_500' + ;; + "C96") + export OCNRES=500 + export waveGRD='glo_200' + ;; + "C192") + export OCNRES=050 + export waveGRD='glo_200' + ;; + "C384") + export OCNRES=025 + export waveGRD='glo_025' + ;; + "C768" | "C1152") + export OCNRES=025 + export waveGRD='mx025' + ;; + *) + echo "FATAL ERROR: Unrecognized CASE ${CASE}, ABORT!" + exit 1 + ;; +esac +export ICERES=${OCNRES} + +case "${APP}" in + ATM) + ;; + ATMA) + export DO_AERO="YES" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + fi + ;; + *) + echo "Unrecognized APP: '${APP}'" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=120 +export FHMAX_GFS_06=120 +export FHMAX_GFS_12=120 +export FHMAX_GFS_18=120 +current_fhmax_var=FHMAX_GFS_${cyc}; declare -x FHMAX_GFS=${!current_fhmax_var} + +export FHOUT_GFS=6 # Must be 6 for S2S until #1629 is addressed; 3 for ops +export FHMAX_HF_GFS=0 +export FHOUT_HF_GFS=1 +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +#JKHexport restart_interval_gfs=12 +export restart_interval_gfs=-1 ## JKH +# NOTE: Do not set this to zero. Instead set it to $FHMAX_GFS +# TODO: Remove this variable from config.base and reference from config.fcst +# TODO: rework logic in config.wave and push it to parsing_nameslist_WW3.sh where it is actually used + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="@DOIAU@" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=${IAUFHRS%%,*} +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="@DO_JEDIATMVAR@" +export DO_JEDIATMENS="@DO_JEDIATMENS@" +export DO_JEDIOCNVAR="@DO_JEDIOCNVAR@" +export DO_JEDILANDDA="@DO_JEDILANDDA@" +export DO_MERGENSST="@DO_MERGENSST@" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [[ ${l4densvar} = ".true." ]]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 + export IAUFHRS="6" +fi + +if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
+
+# Make the nsstbufr file on the fly or use the GDA version
+export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@"
+
+# Make the aircraft prepbufr file on the fly or use the GDA version
+export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@"
+
+# Analysis increments to zero in CALCINCEXEC
+export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'"
+
+# Write analysis files for early cycle EnKF
+export DO_CALC_INCREMENT_ENKF_GFS="YES"
+
+# Stratospheric increments to zero
+export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'"
+export INCVARS_EFOLD="5"
+
+# Switch to generate netcdf or binary diagnostic files. If not specified,
+# script defaults to binary diagnostic files. Set diagnostic file
+# variables here since used in DA job
+export netcdf_diag=".true."
+export binary_diag=".false."
+
+# Verification options
+export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp; not supported with spack-stack
+export DO_FIT2OBS="YES" # Run fit to observations package
+
+# Archiving options
+export HPSSARCH="@HPSSARCH@" # save data to HPSS archive
+export LOCALARCH="@LOCALARCH@" # save data to local archive
+if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then
+  echo "Both HPSS and local archiving selected. Please choose one or the other." 
+ exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +echo "END: config.base" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base.emc.dyn_hera b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base.emc.dyn_hera new file mode 100644 index 0000000000..2f07dc7970 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base.emc.dyn_hera @@ -0,0 +1,406 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +export HPSS_PROJECT="@HPSS_PROJECT@" + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs="${HOMEgfs}/parm" +export FIXgfs="${HOMEgfs}/fix" +export USHgfs="${HOMEgfs}/ush" +export UTILgfs="${HOMEgfs}/util" +export EXECgfs="${HOMEgfs}/exec" +export SCRgfs="${HOMEgfs}/scripts" + +export FIXam="${FIXgfs}/am" +export FIXaer="${FIXgfs}/aer" +export FIXcpl="${FIXgfs}/cpl" +export FIXlut="${FIXgfs}/lut" +export FIXorog="${FIXgfs}/orog" +export FIXcice="${FIXgfs}/cice" +export FIXmom="${FIXgfs}/mom6" +export FIXreg2grb2="${FIXgfs}/reg2grb2" +export FIXugwd="${FIXgfs}/ugwd" + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" +export BASE_CPLIC="@BASE_CPLIC@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_GOES="NO" # GOES products +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export DO_NPOESS="NO" # NPOESS products +export DO_TRACKER="NO" # Hurricane track verification ## JKH +export DO_GENESIS="NO" # Cyclone genesis verification ## JKH +export DO_GENESIS_FSU="NO" # Cyclone genesis verification (FSU) +export DO_VERFOZN="YES" # Ozone data assimilation monitoring +export DO_VERFRAD="YES" # Radiance data assimilation monitoring +export DO_VMINMON="YES" # GSI minimization monitoring +export DO_MOS="NO" # GFS Model Output Statistics - Only supported on WCOSS2 + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT:-}/obsproc/v${obsproc_run_ver:-1.1.2}" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump" +export NCLEN="${HOMEgfs}/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export 
EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT:-}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change # TODO: Why is this needed and where is it used? 
+ +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") + export OCNRES=500 + export waveGRD='glo_500' + ;; + "C96") + export OCNRES=500 + export waveGRD='glo_200' + ;; + "C192") + export OCNRES=050 + export waveGRD='glo_200' + ;; + "C384") + export OCNRES=025 + export waveGRD='glo_025' + ;; + "C768" | "C1152") + export OCNRES=025 + export waveGRD='mx025' + ;; + *) + echo "FATAL ERROR: Unrecognized CASE ${CASE}, ABORT!" + exit 1 + ;; +esac +export ICERES=${OCNRES} + +case "${APP}" in + ATM) + ;; + ATMA) + export DO_AERO="YES" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + fi + ;; + *) + echo "Unrecognized APP: '${APP}'" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=120 +export FHMAX_GFS_06=120 +export FHMAX_GFS_12=120 +export FHMAX_GFS_18=120 +current_fhmax_var=FHMAX_GFS_${cyc}; declare -x FHMAX_GFS=${!current_fhmax_var} + +export FHOUT_GFS=6 # Must be 6 for S2S until #1629 is addressed; 3 for ops +export FHMAX_HF_GFS=0 +export FHOUT_HF_GFS=1 +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +#JKHexport restart_interval_gfs=12 +export restart_interval_gfs=-1 ## JKH +# NOTE: Do not set this to zero. Instead set it to $FHMAX_GFS +# TODO: Remove this variable from config.base and reference from config.fcst +# TODO: rework logic in config.wave and push it to parsing_nameslist_WW3.sh where it is actually used + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="@DOIAU@" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=${IAUFHRS%%,*} +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="@DO_JEDIATMVAR@" +export DO_JEDIATMENS="@DO_JEDIATMENS@" +export DO_JEDIOCNVAR="@DO_JEDIOCNVAR@" +export DO_JEDILANDDA="@DO_JEDILANDDA@" +export DO_MERGENSST="@DO_MERGENSST@" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [[ ${l4densvar} = ".true." ]]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 + export IAUFHRS="6" +fi + +if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
+
+# Make the nsstbufr file on the fly or use the GDA version
+export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@"
+
+# Make the aircraft prepbufr file on the fly or use the GDA version
+export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@"
+
+# Analysis increments to zero in CALCINCEXEC
+export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'"
+
+# Write analysis files for early cycle EnKF
+export DO_CALC_INCREMENT_ENKF_GFS="YES"
+
+# Stratospheric increments to zero
+export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'"
+export INCVARS_EFOLD="5"
+
+# Switch to generate netcdf or binary diagnostic files. If not specified,
+# script defaults to binary diagnostic files. Set diagnostic file
+# variables here since used in DA job
+export netcdf_diag=".true."
+export binary_diag=".false."
+
+# Verification options
+export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp; not supported with spack-stack
+export DO_FIT2OBS="NO" # Run fit to observations package ## JKH
+
+# Archiving options
+export HPSSARCH="@HPSSARCH@" # save data to HPSS archive
+export LOCALARCH="@LOCALARCH@" # save data to local archive
+if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then
+  echo "Both HPSS and local archiving selected. Please choose one or the other." 
+ exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +echo "END: config.base" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base.emc.dyn_jet b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base.emc.dyn_jet new file mode 100644 index 0000000000..df6498d7b3 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base.emc.dyn_jet @@ -0,0 +1,406 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +export HPSS_PROJECT="@HPSS_PROJECT@" + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs="${HOMEgfs}/parm" +export FIXgfs="${HOMEgfs}/fix" +export USHgfs="${HOMEgfs}/ush" +export UTILgfs="${HOMEgfs}/util" +export EXECgfs="${HOMEgfs}/exec" +export SCRgfs="${HOMEgfs}/scripts" + +export FIXam="${FIXgfs}/am" +export FIXaer="${FIXgfs}/aer" +export FIXcpl="${FIXgfs}/cpl" +export FIXlut="${FIXgfs}/lut" +export FIXorog="${FIXgfs}/orog" +export FIXcice="${FIXgfs}/cice" +export FIXmom="${FIXgfs}/mom6" +export FIXreg2grb2="${FIXgfs}/reg2grb2" +export FIXugwd="${FIXgfs}/ugwd" + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set 
via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" +export BASE_CPLIC="@BASE_CPLIC@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_GOES="NO" # GOES products +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export DO_NPOESS="NO" # NPOESS products +export DO_TRACKER="YES" # Hurricane track verification +export DO_GENESIS="NO" # Cyclone genesis verification ## JKH +export DO_GENESIS_FSU="NO" # Cyclone genesis verification (FSU) +export DO_VERFOZN="YES" # Ozone data assimilation monitoring +export DO_VERFRAD="YES" # Radiance data assimilation monitoring +export DO_VMINMON="YES" # GSI minimization monitoring +export DO_MOS="NO" # GFS Model Output Statistics - Only supported on WCOSS2 + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT:-}/obsproc/v${obsproc_run_ver:-1.1.2}" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump" +export NCLEN="${HOMEgfs}/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN 
is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT:-}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change # TODO: Why is this needed and where is it used? + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") + export OCNRES=500 + export waveGRD='glo_500' + ;; + "C96") + export OCNRES=500 + export waveGRD='glo_200' + ;; + "C192") + export OCNRES=050 + export waveGRD='glo_200' + ;; + "C384") + export OCNRES=025 + export waveGRD='glo_025' + ;; + "C768" | "C1152") + export OCNRES=025 + export waveGRD='mx025' + ;; + *) + echo "FATAL ERROR: Unrecognized CASE ${CASE}, ABORT!" 
+ exit 1 + ;; +esac +export ICERES=${OCNRES} + +case "${APP}" in + ATM) + ;; + ATMA) + export DO_AERO="YES" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + fi + ;; + *) + echo "Unrecognized APP: '${APP}'" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=168 ## JKH +export FHMAX_GFS_06=168 ## JKH +export FHMAX_GFS_12=168 ## JKH +export FHMAX_GFS_18=168 ## JKH +current_fhmax_var=FHMAX_GFS_${cyc}; declare -x FHMAX_GFS=${!current_fhmax_var} + +export FHOUT_GFS=6 # Must be 6 for S2S until #1629 is addressed; 3 for ops +export FHMAX_HF_GFS=0 +export FHOUT_HF_GFS=1 +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +#JKHexport restart_interval_gfs=12 +export restart_interval_gfs=-1 ## JKH +# NOTE: Do not set this to zero. 
Instead set it to $FHMAX_GFS +# TODO: Remove this variable from config.base and reference from config.fcst +# TODO: rework logic in config.wave and push it to parsing_nameslist_WW3.sh where it is actually used + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="@DOIAU@" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=${IAUFHRS%%,*} +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="@DO_JEDIATMVAR@" +export DO_JEDIATMENS="@DO_JEDIATMENS@" +export DO_JEDIOCNVAR="@DO_JEDIOCNVAR@" +export DO_JEDILANDDA="@DO_JEDILANDDA@" +export DO_MERGENSST="@DO_MERGENSST@" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [[ ${l4densvar} = ".true." ]]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." 
+  export IAUFHRS_ENKF="6"
+fi
+
+# Check if cycle is cold starting, DOIAU off, or free-forecast mode
+if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then
+  export IAU_OFFSET=0
+  export IAU_FHROT=0
+  export IAUFHRS="6"
+fi
+
+if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi
+
+# turned on nsst in anal and/or fcst steps, and turn off rtgsst
+export DONST="YES"
+if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi
+
+# The switch to apply SST elevation correction or not
+export nst_anl=.true.
+
+# Make the nsstbufr file on the fly or use the GDA version
+export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@"
+
+# Make the aircraft prepbufr file on the fly or use the GDA version
+export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@"
+
+# Analysis increments to zero in CALCINCEXEC
+export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'"
+
+# Write analysis files for early cycle EnKF
+export DO_CALC_INCREMENT_ENKF_GFS="YES"
+
+# Stratospheric increments to zero
+export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'"
+export INCVARS_EFOLD="5"
+
+# Switch to generate netcdf or binary diagnostic files.  If not specified,
+# scripts default to binary diagnostic files.  Set diagnostic file
+# variables here since used in DA job
+export netcdf_diag=".true."
+export binary_diag=".false."
+
+# Verification options
+export DO_METP="NO"         # Run METPLUS jobs - set METPLUS settings in config.metp; not supported with spack-stack
+export DO_FIT2OBS="NO"      # Run fit to observations package ## JKH
+
+# Archiving options
+export HPSSARCH="@HPSSARCH@"        # save data to HPSS archive
+export LOCALARCH="@LOCALARCH@"        # save data to local archive
+if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then
+   echo "Both HPSS and local archiving selected.  Please choose one or the other."
+ exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +echo "END: config.base" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.cleanup b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.cleanup new file mode 100644 index 0000000000..1908c91bb5 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.cleanup @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.cleanup ########## +echo "BEGIN: config.cleanup" + +# Get task specific resources +source "${EXPDIR}/config.resources" cleanup + +export CLEANUP_COM="YES" # NO=retain ROTDIR. YES default in cleanup.sh + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +# Specify the list of files to exclude from the first stage of cleanup +# Because arrays cannot be exported, list is a single string of comma- +# separated values. This string is split to form an array at runtime. +case ${RUN} in + gdas | gfs) exclude_string="*prepbufr*, *cnvstat*, *atmanl.nc" ;; + enkf*) exclude_string="*f006.ens*" ;; + *) exclude_string="" ;; +esac +export exclude_string + +echo "END: config.cleanup" \ No newline at end of file diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.com b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.com new file mode 100644 index 0000000000..208b0ac096 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.com @@ -0,0 +1,96 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. 
All templates must use single quotations so variable
+# expansion does not occur when this file is sourced.  Substitution happens later
+# during runtime. It is recommended to use the helper function `generate_com()`,
+# to do this substitution, which is defined in `ush/preamble.sh`.
+#
+# Syntax for generate_com():
+#   generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]]
+#
+#   options:
+#     -r: Make variable read-only (same as `declare -r`)
+#     -x: Mark variable for declare -rx (same as `declare -x`)
+#     var1, var2, etc: Variable names whose values will be generated from a template
+#                      and declared
+#     tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL")
+#
+#   Examples:
+#     # Current cycle and RUN
+#     YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS
+#
+#     # Previous cycle and gdas
+#     RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+#       COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL
+#
+#     # Current cycle and COM for first member
+#     MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY
+#
+
+#
+# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()`
+# must correspondingly be updated to match.
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_CONF_TMPL=${COM_BASE}'/conf' +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2' +declare -rx COM_ATMOS_GRIB_GRID_TMPL=${COM_ATMOS_GRIB_TMPL}'/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_OZNMON_TMPL=${COM_BASE}'/products/atmos/oznmon' +declare -rx COM_ATMOS_RADMON_TMPL=${COM_BASE}'/products/atmos/radmon' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx 
COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2' +declare -rx COM_OCEAN_GRIB_GRID_TMPL=${COM_OCEAN_GRIB_TMPL}'/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.earc b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. 
$EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ecen b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.echgres b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ediag b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. 
$EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.efcs b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.efcs new file mode 100644 index 0000000000..283ec3ab7e --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.efcs @@ -0,0 +1,72 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# Turn off components in ensemble via _ENKF, or use setting from deterministic +export DO_AERO=${DO_AERO_ENKF:-${DO_AERO:-"NO"}} +export DO_OCN=${DO_OCN_ENKF:-${DO_OCN:-"NO"}} +export DO_ICE=${DO_ICE_ENKF:-${DO_ICE:-"NO"}} +export DO_WAVE=${DO_WAVE_ENKF:-${DO_WAVE:-"NO"}} + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +# Ocean/Ice/Waves ensemble configurations are identical to deterministic member +[[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}" +[[ "${DO_ICE}" == "YES" ]] && string="${string} --cice6 ${ICERES}" +[[ "${DO_WAVE}" == "YES" ]] && string="${string} --ww3 ${waveGRD// /;}" +[[ "${DO_AERO}" == "YES" ]] && string="${string} --gocart" +# We are counting on $string being multiple arguments +# shellcheck disable=SC2086 +source "${EXPDIR}/config.ufs" ${string} + +# Get task specific resources +. "${EXPDIR}/config.resources" efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. 
+export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [[ "${QUILTING}" = ".true." ]] && [[ "${OUTPUT_GRID}" = "gaussian_grid" ]]; then + export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table_da" +else + export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table_da_orig" +fi + +# For IAU, write restarts at beginning of window also +if [[ "${DOIAU_ENKF:-}" = "YES" ]]; then + export restart_interval="3" +else + export restart_interval="6" +fi + +echo "END: config.efcs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.eobs b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. 
+ +echo "END: config.eobs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.epos b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.esfc b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.eupd b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. 
$EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fcst b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fcst new file mode 120000 index 0000000000..8a4c14d506 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fcst @@ -0,0 +1 @@ +config.fcst_dt-inner=75 \ No newline at end of file diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fcst_dt-inner=150 b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fcst_dt-inner=150 new file mode 100644 index 0000000000..c8a12885e1 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fcst_dt-inner=150 @@ -0,0 +1,336 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case ${WAVE_CDUMP} in + both | "${CDUMP/enkf}" ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE}" +[[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}" +[[ "${DO_ICE}" == "YES" ]] && string="${string} --cice6 ${ICERES}" +[[ "${DO_WAVE}" == "YES" ]] && string="${string} --ww3 ${waveGRD// /;}" +[[ "${DO_AERO}" == "YES" ]] && string="${string} --gocart" +# We are counting on $string being multiple arguments +# shellcheck disable=SC2086 +source "${EXPDIR}/config.ufs" ${string} + + +# Get task specific resources +source "${EXPDIR}/config.resources" fcst +export domains_stack_size="16000000" + + +if [[ "${DONST}" == "YES" ]]; then + source "${EXPDIR}/config.nsst" +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### + +export FORECASTSH="${HOMEgfs}/scripts/exglobal_forecast.sh" +#export FORECASTSH="${HOMEgfs}/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="${HOMEgfs}/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" +#JKHexport range_warn=".false." ## JKH + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. 
+if (( gwd_opt == 1 )); then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + launch_level=$(echo "${LEVS}/2.35" |bc) + export launch_level +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if (( gwd_opt == 2 )); then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=1 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".false." + export do_ugwp_v1=".true." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".true." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".true." + export do_ugwp_v1_orog_only=".false." + launch_level=$(echo "${LEVS}/2.35" |bc) + export launch_level + if [[ ${do_gsl_drag_ls_bl} == ".true." ]]; then + export cdmbgwd=${cdmbgwd_gsl} + fi +fi + +# Sponge layer settings +export tau=0. +export rf_cutoff=10. +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if (( LEVS == 128 )) && [[ "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=".true." 
+ export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 + export lcnorm=".true." ## JKH +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "${satmedmf}" == ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "${progsigma}" == ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=-1 ## JKH - no shallow GF +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export progsigma=.false. + export imfdeepcnv=5 + export imfshalcnv=5 +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." 
+export do_sat_adj=".false." +export random_clds=".true." + +case ${imp_physics} in + 99) # ZhaoCarr + export ncld=1 + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + ;; + 6) # WSM6 + export ncld=2 + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_wsm6${tbf}${tbp}" + export nwat=6 + ;; + 8) # Thompson + export ncld=2 + export nwat=6 + + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" || + "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/ufs/fv3/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_thompson_noaero_tke${tbp}" + fi + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".true." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "${sedi_semi}" == .true. ]]; then export dt_inner=${DELTIM} ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + ;; + 11) # GFDL + export ncld=5 + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + ;; + *) echo "Unknown microphysics option, ABORT!" 
;; +esac + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="ufs.frac" +if [[ "${FRAC_GRID:-".true."}" == ".false." ]]; then + export cplmode="ufs.nfrac" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "${CDUMP}" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table_da" + + if [[ "${DOIAU}" == "YES" ]]; then + export restart_interval="3" + else + export restart_interval="6" + fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "${CDUMP}" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval=${restart_interval_gfs:-12} + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
+ + # Write each restart file in 16 small files to save time + if [[ "${CASE}" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fcst_dt-inner=75 b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fcst_dt-inner=75 new file mode 100644 index 0000000000..a4c4ee8072 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fcst_dt-inner=75 @@ -0,0 +1,338 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case ${WAVE_CDUMP} in + both | "${CDUMP/enkf}" ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE}" +[[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}" +[[ "${DO_ICE}" == "YES" ]] && string="${string} --cice6 ${ICERES}" +[[ "${DO_WAVE}" == "YES" ]] && string="${string} --ww3 ${waveGRD// /;}" +[[ "${DO_AERO}" == "YES" ]] && string="${string} --gocart" +# We are counting on $string being multiple arguments +# shellcheck disable=SC2086 +source "${EXPDIR}/config.ufs" ${string} + + +# Get task specific resources +source "${EXPDIR}/config.resources" fcst +export domains_stack_size="16000000" + + +if [[ "${DONST}" == "YES" ]]; then + source "${EXPDIR}/config.nsst" +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### + +export FORECASTSH="${HOMEgfs}/scripts/exglobal_forecast.sh" +#export FORECASTSH="${HOMEgfs}/scripts/exglobal_forecast.py" # Temp. 
while this is worked on +export FCSTEXECDIR="${HOMEgfs}/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" +#JKHexport range_warn=".false." ## JKH + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if (( gwd_opt == 1 )); then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + launch_level=$(echo "${LEVS}/2.35" |bc) + export launch_level +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if (( gwd_opt == 2 )); then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=1 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".false." + export do_ugwp_v1=".true." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".true." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".true." + export do_ugwp_v1_orog_only=".false." + launch_level=$(echo "${LEVS}/2.35" |bc) + export launch_level + if [[ ${do_gsl_drag_ls_bl} == ".true." ]]; then + export cdmbgwd=${cdmbgwd_gsl} + fi +fi + +# Sponge layer settings +export tau=0. +export rf_cutoff=10. 
+export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if (( LEVS == 128 )) && [[ "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=".true." + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 + export lcnorm=".true." ## JKH +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "${satmedmf}" == ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "${progsigma}" == ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." 
+if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=-1 ## JKH - no shallow GF +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export progsigma=.false. + export imfdeepcnv=5 + export imfshalcnv=5 +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +case ${imp_physics} in + 99) # ZhaoCarr + export ncld=1 + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + ;; + 6) # WSM6 + export ncld=2 + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_wsm6${tbf}${tbp}" + export nwat=6 + ;; + 8) # Thompson + export ncld=2 + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".true." + export ttendlim="-999" + export sedi_semi=.true. + export decfl=10 + + if [[ "${CCPP_SUITE}" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_c3_mynn" || + "${CCPP_SUITE}" == "FV3_GFS_v17_p8_thompson" ]] ; then + #JKH keep dt_inner $DELTIM/2 (75) if running aerosol-aware Thompson + export dt_inner=$((DELTIM/2)) + export ltaerosol=".true." 
+ export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_thompson_aero_tke${tbp}" + else + export dt_inner=$((DELTIM/2)) + if [[ "${sedi_semi}" == .true. ]]; then export dt_inner=${DELTIM} ; fi + export ltaerosol=".false." + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_thompson_noaero_tke${tbp}" + fi + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + ;; + 11) # GFDL + export ncld=5 + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + ;; + *) echo "Unknown microphysics option, ABORT!" ;; +esac + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="ufs.frac" +if [[ "${FRAC_GRID:-".true."}" == ".false." 
]]; then + export cplmode="ufs.nfrac" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "${CDUMP}" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table_da" + + if [[ "${DOIAU}" == "YES" ]]; then + export restart_interval="3" + else + export restart_interval="6" + fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "${CDUMP}" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval=${restart_interval_gfs:-12} + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "${CASE}" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. 
+# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fit2obs b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.gempak b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.genesis b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.genesis new file mode 100644 index 0000000000..62a1bf88c0 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.genesis @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.genesis ########## +echo "BEGIN: config.genesis" + +# Get task specific resources +. "${EXPDIR}/config.resources" genesis + +# Get tropcy settings +. 
"${EXPDIR}/config.tropcy" + +echo "END: config.genesis" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.genesis_fsu b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.genesis_fsu new file mode 100644 index 0000000000..13948592c4 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.genesis_fsu @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.genesis_fsu ########## +echo "BEGIN: config.genesis_fsu" + +# Get task specific resources +. "${EXPDIR}/config.resources" genesis_fsu + +# Get tropcy settings +. "${EXPDIR}/config.tropcy" + +echo "END: config.genesis_fsu" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.getic b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.getic new file mode 100644 index 0000000000..d51e2d3900 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. $EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDAS_INIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDAS_INIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDAS_INIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + 
HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDAS_INIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDAS_INIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDAS_INIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDAS_INIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ice b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ice new file mode 100644 index 0000000000..205458020f --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ice @@ -0,0 +1,9 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +# Override atm-only FV3 settings when ice model is on +export min_seaice="1.0e-6" +export use_cice_alb=".true." + +echo "END: config.ice" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.init b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.init new file mode 100644 index 0000000000..fccbc719db --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.init @@ -0,0 +1,55 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDAS_INIT_DIR=${UFS_DIR}/util/gdas_init +export EXEC_DIR=${UFS_DIR}/exec + +export CRES_HIRES=$CASE +export CRES_ENKF="" +export FRAC_ORO="yes" + +export RUNICSH=${GDAS_INIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDAS_INIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDAS_INIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDAS_INIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDAS_INIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDAS_INIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.landanl b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.landanl new file mode 100644 index 0000000000..8d49f10d76 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.landanl @@ -0,0 +1,34 @@ +#! 
/usr/bin/env bash + +########## config.landanl ########## +# configuration common to land analysis tasks + +echo "BEGIN: config.landanl" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanl + +obs_list_name=gdas_land_gts_only.yaml +if [[ "${cyc}" = "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} + +# Name of the JEDI executable and its yaml template +export JEDIEXE="${HOMEgfs}/exec/fv3jedi_letkf.x" +export JEDIYAML="${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml" + +# Ensemble member properties +export SNOWDEPTHVAR="snodl" +export BESTDDEV="30." # Background Error Std. Dev. for LETKFOI + +# Name of the executable that applies increment to bkg and its namelist template +export APPLY_INCR_EXE="${HOMEgfs}/exec/apply_incr.exe" +export APPLY_INCR_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/apply_incr_nml.j2" + +export io_layout_x=1 +export io_layout_y=1 + +echo "END: config.landanl" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.metp b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. +export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" 
+export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment 
${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos new file mode 100644 index 0000000000..a74c7e7d21 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos @@ -0,0 +1,9 @@ +#! /usr/bin/env bash + +########## config.mos ########## +echo "BEGIN: config.mos" + +# MOS package location +export HOMEgfs_mos=/lfs/h1/ops/prod/packages/gfs_mos.v${mos_ver} + +echo "END: config.mos" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_grd_fcst b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_grd_fcst new file mode 100644 index 0000000000..db94af945f --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_grd_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_grd_fcst ########## +echo "BEGIN: config.mos_ext_grd_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_grd_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_grd_fcst" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_grd_prdgen b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_grd_prdgen new file mode 100644 index 0000000000..ade31b0c1a --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_grd_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_grd_prdgen ########## +echo "BEGIN: config.mos_ext_grd_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_grd_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_grd_prdgen" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_grd_prep b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_grd_prep new file mode 100644 index 0000000000..0ba14e2573 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_grd_prep @@ -0,0 +1,12 @@ +#! 
/usr/bin/env bash + +########## config.mos_ext_grd_prep ########## +echo "BEGIN: config.mos_ext_grd_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_grd_prep + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_grd_prep" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_stn_fcst b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_stn_fcst new file mode 100644 index 0000000000..5b26d196f9 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_stn_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_stn_fcst ########## +echo "BEGIN: config.mos_ext_stn_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_stn_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_stn_fcst" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_stn_prdgen b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_stn_prdgen new file mode 100644 index 0000000000..9f63eb56fd --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_stn_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_stn_prdgen ########## +echo "BEGIN: config.mos_ext_stn_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_stn_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_stn_prdgen" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_stn_prep b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_stn_prep new file mode 100644 index 0000000000..c443503f11 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_ext_stn_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_stn_prep ########## +echo "BEGIN: config.mos_ext_stn_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_stn_prep + +# Get MOS settings +. 
"${EXPDIR}/config.mos" + +echo "END: config.mos_ext_stn_prep" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_grd_fcst b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_grd_fcst new file mode 100644 index 0000000000..bd0d50a04d --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_grd_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_grd_fcst ########## +echo "BEGIN: config.mos_grd_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_grd_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_grd_fcst" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_grd_prdgen b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_grd_prdgen new file mode 100644 index 0000000000..dd9ce8bcd8 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_grd_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_grd_prdgen ########## +echo "BEGIN: config.mos_grd_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_grd_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_grd_prdgen" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_grd_prep b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_grd_prep new file mode 100644 index 0000000000..8a3d334d0d --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_grd_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_grd_prep ########## +echo "BEGIN: config.mos_grd_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_grd_prep + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_grd_prep" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_stn_fcst b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_stn_fcst new file mode 100644 index 0000000000..7cb266ea3a --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_stn_fcst @@ -0,0 +1,12 @@ +#! 
/usr/bin/env bash + +########## config.mos_stn_fcst ########## +echo "BEGIN: config.mos_stn_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_stn_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_stn_fcst" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_stn_prdgen b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_stn_prdgen new file mode 100644 index 0000000000..f92edbd0fd --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_stn_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_stn_prdgen ########## +echo "BEGIN: config.mos_stn_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_stn_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_stn_prdgen" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_stn_prep b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_stn_prep new file mode 100644 index 0000000000..b236f42879 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_stn_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_stn_prep ########## +echo "BEGIN: config.mos_stn_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_stn_prep + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_stn_prep" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_wx_ext_prdgen b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_wx_ext_prdgen new file mode 100644 index 0000000000..054cb950ad --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_wx_ext_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_wx_ext_prdgen ########## +echo "BEGIN: config.mos_wx_ext_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_wx_ext_prdgen + +# Get MOS settings +. 
"${EXPDIR}/config.mos" + +echo "END: config.mos_wx_ext_prdgen" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_wx_prdgen b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_wx_prdgen new file mode 100644 index 0000000000..d4481b65fc --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.mos_wx_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_wx_prdgen ########## +echo "BEGIN: config.mos_wx_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_wx_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_wx_prdgen" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.npoess b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.npoess new file mode 100644 index 0000000000..9a388d2e6b --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.npoess @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.npoess ########## +# GFS NPOESS step specific + +echo "BEGIN: config.npoess" + +# Get task specific resources +. "${EXPDIR}/config.resources" npoess + +echo "END: config.npoess" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.nsst b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.nsst new file mode 100644 index 0000000000..db4367b2c0 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.nsst @@ -0,0 +1,34 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +cdate="${PDY}${cyc}" +if (( cdate < 2017072000 )); then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if (( NST_GSI > 0 )); then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocn b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocn new file mode 100644 index 0000000000..37f6a966aa --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocn @@ -0,0 +1,22 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanal b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanal new file mode 100644 index 0000000000..9deaf4a361 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanal @@ -0,0 +1,31 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR="${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config" +export OBS_LIST=/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML="${OBS_LIST}" +export FV3JEDI_STAGE_YAML="${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml" +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25/soca +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=100 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2-v2-v3 + +# NICAS +export 
NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalbmat b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalchkpt b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalpost b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalprep b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalrun b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalvrfy b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnpost b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnpost new file mode 100644 index 0000000000..851c476e6c --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnpost @@ -0,0 +1,29 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source "${EXPDIR}/config.resources" ocnpost + +# Convert netcdf files to grib files using post job +#------------------------------------------- +case "${OCNRES}" in + "025") export MAKE_OCN_GRIB="YES";; + "050") export MAKE_OCN_GRIB="NO";; + "100") export MAKE_OCN_GRIB="NO";; + "500") export MAKE_OCN_GRIB="NO";; + *) export MAKE_OCN_GRIB="NO";; +esac + +if [[ "${machine}" = "WCOSS2" ]] || [[ "${machine}" = "HERCULES" ]]; then + #Currently the conversion to netcdf uses NCL which is not on WCOSS2 or HERCULES + #This should be removed when this is updated + export MAKE_OCN_GRIB="NO" +fi + +# No. of forecast hours to process in a single job +export NFHRS_PER_GROUP=3 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.postsnd b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prep b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prep new file mode 100644 index 0000000000..d5ac1925f7 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prep @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. 
Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepatmiodaobs b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepatmiodaobs new file mode 100644 index 0000000000..ed9b246120 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepatmiodaobs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.prepatmiodaobs ########## +# Atm Obs Prep specific + +echo "BEGIN: config.prepatmiodaobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" prepatmiodaobs + +export BUFR2IODASH="${HOMEgfs}/ush/run_bufr2ioda.py" +export IODAPARM="${HOMEgfs}/sorc/gdas.cd/parm/ioda/bufr2ioda" + +echo "END: config.prepatmiodaobs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.preplandobs b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.preplandobs new file mode 100644 index 0000000000..20ae20b5ad --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.preplandobs @@ -0,0 +1,18 @@ +#! /usr/bin/env bash + +########## config.preplandobs ########## +# Land Obs Prep specific + +echo "BEGIN: config.preplandobs" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" preplandobs + +export GTS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_gts.yaml" +export BUFR2IODAX="${HOMEgfs}/exec/bufr2ioda.x" +export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" +export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" +export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" +export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py" + +echo "END: config.preplandobs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepoceanobs b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepoceanobs new file mode 100644 index 0000000000..068ecff1ad --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepoceanobs @@ -0,0 +1,17 @@ +#!/bin/bash + +########## config.prepoceanobs ########## + +echo "BEGIN: config.prepoceanobs" + +export OCNOBS2IODAEXEC=${HOMEgfs}/sorc/gdas.cd/build/bin/gdas_obsprovider2ioda.x + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBSPROC_YAML=@OBSPROC_CONFIG@ +export OBS_LIST=@SOCA_OBS_LIST@ +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} + +# Get task specific resources +. "${EXPDIR}/config.resources" prepoceanobs +echo "END: config.prepoceanobs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources new file mode 100644 index 0000000000..5d8540d7a3 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources @@ -0,0 +1,1189 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "stage_ic aerosol_init" + echo "prep preplandobs prepatmiodaobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanl" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst echgres" + echo "upp atmos_products" + echo "tracker genesis genesis_fsu" + echo "verfozn verfrad vminmon fit2obs metp arch cleanup" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak npoess" + echo "ocnanalprep prepoceanobs ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ "${machine}" = "HERA" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ "${machine}" = "AWSPW" ]]; then + export PARTITION_BATCH="compute" + export npe_node_max=40 +elif [[ "${machine}" = "ORION" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "HERCULES" ]]; then + export npe_node_max=40 +fi + +if [[ "${step}" = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + 
else + export memory_prep="40G" + fi + +elif [[ "${step}" = "preplandobs" ]]; then + export wtime_preplandobs="00:05:00" + npe_preplandobs=1 + export npe_preplandobs + export nth_preplandobs=1 + npe_node_preplandobs=1 + export npe_node_preplandobs + +elif [[ "${step}" = "prepatmiodaobs" ]]; then + export wtime_prepatmiodaobs="00:10:00" + export npe_prepatmiodaobs=1 + export nth_prepatmiodaobs=1 + npe_node_prepatmiodaobs=$(echo "${npe_node_max} / ${nth_prepatmiodaobs}" | bc) + export npe_node_prepatmiodaobs + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ "${step}" = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ "${step}" = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ "${step}" = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export 
memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ "${step}" = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ "${step}" = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="04:00:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export 
NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + layout_gsib_x=$(echo "${layout_x} * 3" | bc) + export layout_gsib_x + layout_gsib_y=$(echo "${layout_y} * 2" | bc) + export layout_gsib_y + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanl" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_landanl="00:15:00" + npe_landanl=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanl + export nth_landanl=1 + npe_node_landanl=$(echo "${npe_node_max} / ${nth_landanl}" | bc) + export npe_node_landanl + +elif [[ "${step}" = 
"aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=8 + layout_y=8 + ;; + C384) + layout_x=8 + layout_y=8 + ;; + C192 | C96) + layout_x=8 + layout_y=8 + ;; + C48 ) + # this case is for testing only + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=8 + layout_y=8 + ;; + C384) + layout_x=8 + layout_y=8 + ;; + C192 | C96) + layout_x=8 + layout_y=8 + ;; + C48 ) + # this case is for testing only + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export 
memory_ocnanalprep="24GB" + +elif [[ "${step}" = "prepoceanobs" ]]; then + + export wtime_prepoceanobs="00:10:00" + export npe_prepoceanobs=1 + export nth_prepoceanobs=1 + npe_node_prepoceanobs=$(echo "${npe_node_max} / ${nth_prepoceanobs}" | bc) + export npe_node_prepoceanobs + export memory_prepoceanobs="24GB" + + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C96) + npes=16 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + memory_ocnanalrun="128GB" + ;; + C96) + npes=16 + ;; + C48) + npes=16 + memory_ocnanalrun="64GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:15:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=2 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + export memory_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C96) + export memory_ocnanalchkpt="32GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export 
npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ "${step}" = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ "${CASE}" = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ "${machine}" = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ "${machine}" = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo 
"${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + export memory_analcalc="48GB" + +elif [[ "${step}" = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ "${step}" = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." 
]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-${FV3PETS}} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + CHMPETS=0; CHMTHREADS=0 + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). 
+ (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + export CHMPETS CHMTHREADS + + WAVPETS=0; WAVTHREADS=0 + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + export WAVPETS WAVTHREADS + + OCNPETS=0; OCNTHREADS=0 + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + export OCNPETS OCNTHREADS + + ICEPETS=0; ICETHREADS=0 + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + export ICEPETS ICETHREADS + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384") + declare -x "wtime_${step}"="00:20:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + unset 
NTASKS_TOT + +elif [[ "${step}" = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ "${machine}" == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ "${step}" = "upp" ]]; then + + case "${CASE}" in + "C48" | "C96") + export npe_upp=${CASE:1} + ;; + "C192" | "C384" | "C768") + export npe_upp=120 + ;; + *) + echo "FATAL ERROR: Resolution '${CASE}' not supported for UPP'" + exit 1 + ;; + esac + export npe_node_upp=${npe_upp} + + export nth_upp=1 + + export wtime_upp="00:15:00" + if [[ "${npe_node_upp}" -gt "${npe_node_max}" ]]; then + export npe_node_upp=${npe_node_max} + fi + export is_exclusive=True + +elif [[ ${step} = "atmos_products" ]]; then + + export wtime_atmos_products="00:15:00" + export npe_atmos_products=24 + export nth_atmos_products=1 + export npe_node_atmos_products="${npe_atmos_products}" + export wtime_atmos_products_gfs="${wtime_atmos_products}" + export npe_atmos_products_gfs="${npe_atmos_products}" + export nth_atmos_products_gfs="${nth_atmos_products}" + export npe_node_atmos_products_gfs="${npe_node_atmos_products}" + export is_exclusive=True + +elif [[ ${step} = "verfozn" ]]; then + + export wtime_verfozn="00:05:00" + export npe_verfozn=1 + export nth_verfozn=1 + export npe_node_verfozn=1 + export memory_verfozn="1G" + +elif [[ ${step} = "verfrad" ]]; then + + export wtime_verfrad="00:40:00" + export npe_verfrad=1 + export nth_verfrad=1 + export npe_node_verfrad=1 + export memory_verfrad="5G" + +elif [[ ${step} = "vminmon" ]]; then + + export wtime_vminmon="00:05:00" + export npe_vminmon=1 + export nth_vminmon=1 + export npe_node_vminmon=1 + export wtime_vminmon_gfs="00:05:00" + export npe_vminmon_gfs=1 + export nth_vminmon_gfs=1 + export npe_node_vminmon_gfs=1 + export memory_vminmon="1G" + +elif [[ ${step} = "tracker" ]]; 
then + + export wtime_tracker="00:10:00" + export npe_tracker=1 + export nth_tracker=1 + export npe_node_tracker=1 + export memory_tracker="4G" + +elif [[ ${step} = "genesis" ]]; then + + export wtime_genesis="00:25:00" + export npe_genesis=1 + export nth_genesis=1 + export npe_node_genesis=1 + export memory_genesis="4G" + +elif [[ ${step} = "genesis_fsu" ]]; then + + export wtime_genesis_fsu="00:10:00" + export npe_genesis_fsu=1 + export nth_genesis_fsu=1 + export npe_node_genesis_fsu=1 + export memory_genesis_fsu="4G" + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ "${step}" = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ "${step}" = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ "${step}" = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ "${step}" = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export 
memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} == "cleanup" ]]; then + export wtime_cleanup="01:00:00" + export npe_cleanup=1 + export npe_node_cleanup=1 + export nth_cleanup=1 + export memory_cleanup="4096M" + +elif [[ ${step} = "stage_ic" ]]; then + + export wtime_stage_ic="00:15:00" + export npe_stage_ic=1 + export npe_node_stage_ic=1 + export nth_stage_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eobs=200 + elif [[ "${CASE}" = "C384" ]]; then + export npe_eobs=100 + elif [[ "${CASE}" = "C192" || 
"${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ "${machine}" = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ "${step}" = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ "${step}" = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ "${CASE}" = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=8 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ "${step}" = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / 
${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "esfc" ]]; then + + export wtime_esfc="00:08:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ "${step}" = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=1 + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ "${step}" = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ "${step}" = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "npoess" ]]; then + + export wtime_npoess="03:30:00" + export npe_npoess=1 + export npe_node_npoess=1 + export nth_npoess=1 + export memory_npoess="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +elif [[ ${step} = "mos_stn_prep" ]]; then + + export wtime_mos_stn_prep="00:10:00" + export npe_mos_stn_prep=3 + export npe_node_mos_stn_prep=3 + export nth_mos_stn_prep=1 + export memory_mos_stn_prep="5GB" + export NTASK="${npe_mos_stn_prep}" + export 
PTILE="${npe_node_mos_stn_prep}" + +elif [[ ${step} = "mos_grd_prep" ]]; then + + export wtime_mos_grd_prep="00:10:00" + export npe_mos_grd_prep=4 + export npe_node_mos_grd_prep=4 + export nth_mos_grd_prep=1 + export memory_mos_grd_prep="16GB" + export NTASK="${npe_mos_grd_prep}" + export PTILE="${npe_node_mos_grd_prep}" + +elif [[ ${step} = "mos_ext_stn_prep" ]]; then + + export wtime_mos_ext_stn_prep="00:15:00" + export npe_mos_ext_stn_prep=2 + export npe_node_mos_ext_stn_prep=2 + export nth_mos_ext_stn_prep=1 + export memory_mos_ext_stn_prep="5GB" + export NTASK="${npe_mos_ext_stn_prep}" + export PTILE="${npe_node_mos_ext_stn_prep}" + +elif [[ ${step} = "mos_ext_grd_prep" ]]; then + + export wtime_mos_ext_grd_prep="00:10:00" + export npe_mos_ext_grd_prep=7 + export npe_node_mos_ext_grd_prep=7 + export nth_mos_ext_grd_prep=1 + export memory_mos_ext_grd_prep="3GB" + export NTASK="${npe_mos_ext_grd_prep}" + export PTILE="${npe_node_mos_ext_grd_prep}" + +elif [[ ${step} = "mos_stn_fcst" ]]; then + + export wtime_mos_stn_fcst="00:10:00" + export npe_mos_stn_fcst=5 + export npe_node_mos_stn_fcst=5 + export nth_mos_stn_fcst=1 + export memory_mos_stn_fcst="40GB" + export NTASK="${npe_mos_stn_fcst}" + export PTILE="${npe_node_mos_stn_fcst}" + +elif [[ ${step} = "mos_grd_fcst" ]]; then + + export wtime_mos_grd_fcst="00:10:00" + export npe_mos_grd_fcst=7 + export npe_node_mos_grd_fcst=7 + export nth_mos_grd_fcst=1 + export memory_mos_grd_fcst="50GB" + export NTASK="${npe_mos_grd_fcst}" + export PTILE="${npe_node_mos_grd_fcst}" + +elif [[ ${step} = "mos_ext_stn_fcst" ]]; then + + export wtime_mos_ext_stn_fcst="00:20:00" + export npe_mos_ext_stn_fcst=3 + export npe_node_mos_ext_stn_fcst=3 + export nth_mos_ext_stn_fcst=1 + export memory_mos_ext_stn_fcst="50GB" + export NTASK="${npe_mos_ext_stn_fcst}" + export PTILE="${npe_node_mos_ext_stn_fcst}" + export prepost=True + +elif [[ ${step} = "mos_ext_grd_fcst" ]]; then + + export wtime_mos_ext_grd_fcst="00:10:00" + export 
npe_mos_ext_grd_fcst=7 + export npe_node_mos_ext_grd_fcst=7 + export nth_mos_ext_grd_fcst=1 + export memory_mos_ext_grd_fcst="50GB" + export NTASK="${npe_mos_ext_grd_fcst}" + export PTILE="${npe_node_mos_ext_grd_fcst}" + +elif [[ ${step} = "mos_stn_prdgen" ]]; then + + export wtime_mos_stn_prdgen="00:10:00" + export npe_mos_stn_prdgen=1 + export npe_node_mos_stn_prdgen=1 + export nth_mos_stn_prdgen=1 + export memory_mos_stn_prdgen="15GB" + export NTASK="${npe_mos_stn_prdgen}" + export PTILE="${npe_node_mos_stn_prdgen}" + export prepost=True + +elif [[ ${step} = "mos_grd_prdgen" ]]; then + + export wtime_mos_grd_prdgen="00:40:00" + export npe_mos_grd_prdgen=72 + export npe_node_mos_grd_prdgen=18 + export nth_mos_grd_prdgen=4 + export memory_mos_grd_prdgen="20GB" + export NTASK="${npe_mos_grd_prdgen}" + export PTILE="${npe_node_mos_grd_prdgen}" + export OMP_NUM_THREADS="${nth_mos_grd_prdgen}" + +elif [[ ${step} = "mos_ext_stn_prdgen" ]]; then + + export wtime_mos_ext_stn_prdgen="00:10:00" + export npe_mos_ext_stn_prdgen=1 + export npe_node_mos_ext_stn_prdgen=1 + export nth_mos_ext_stn_prdgen=1 + export memory_mos_ext_stn_prdgen="15GB" + export NTASK="${npe_mos_ext_stn_prdgen}" + export PTILE="${npe_node_mos_ext_stn_prdgen}" + export prepost=True + +elif [[ ${step} = "mos_ext_grd_prdgen" ]]; then + + export wtime_mos_ext_grd_prdgen="00:30:00" + export npe_mos_ext_grd_prdgen=96 + export npe_node_mos_ext_grd_prdgen=6 + export nth_mos_ext_grd_prdgen=16 + export memory_mos_ext_grd_prdgen="30GB" + export NTASK="${npe_mos_ext_grd_prdgen}" + export PTILE="${npe_node_mos_ext_grd_prdgen}" + export OMP_NUM_THREADS="${nth_mos_ext_grd_prdgen}" + +elif [[ ${step} = "mos_wx_prdgen" ]]; then + + export wtime_mos_wx_prdgen="00:10:00" + export npe_mos_wx_prdgen=4 + export npe_node_mos_wx_prdgen=2 + export nth_mos_wx_prdgen=2 + export memory_mos_wx_prdgen="10GB" + export NTASK="${npe_mos_wx_prdgen}" + export PTILE="${npe_node_mos_wx_prdgen}" + export 
OMP_NUM_THREADS="${nth_mos_wx_prdgen}" + +elif [[ ${step} = "mos_wx_ext_prdgen" ]]; then + + export wtime_mos_wx_ext_prdgen="00:10:00" + export npe_mos_wx_ext_prdgen=4 + export npe_node_mos_wx_ext_prdgen=2 + export nth_mos_wx_ext_prdgen=2 + export memory_mos_wx_ext_prdgen="10GB" + export NTASK="${npe_mos_wx_ext_prdgen}" + export PTILE="${npe_node_mos_wx_ext_prdgen}" + export OMP_NUM_THREADS="${nth_mos_wx_ext_prdgen}" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.sfcanl b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.stage_ic b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.stage_ic new file mode 100644 index 0000000000..7f3956af4d --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.stage_ic @@ -0,0 +1,41 @@ +#! 
/usr/bin/env bash + +########## config.stage_ic ########## + +echo "BEGIN: config.stage_ic" + +# Get task specific resources +source "${EXPDIR}/config.resources" stage_ic + +case "${CASE}" in + "C48" | "C96") + export CPL_ATMIC="workflow_${CASE}_refactored" + export CPL_ICEIC="workflow_${CASE}_refactored" + export CPL_OCNIC="workflow_${CASE}_refactored" + export CPL_WAVIC="workflow_${CASE}_refactored" + ;; + "C384") + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c_refactored + export CPL_ICEIC=CPC_refactored + export CPL_OCNIC=CPC3Dvar_refactored + export CPL_WAVIC=workflow_C384_refactored + ;; + "C768") + export CPL_ATMIC=HR2_refactored + export CPL_ICEIC=HR1_refactored + export CPL_OCNIC=HR1_refactored + export CPL_WAVIC=HR1_refactored + ;; + "C1152") + export CPL_ATMIC=HR2_C1152_refactored + export CPL_ICEIC=HR3_refactored + export CPL_OCNIC=HR3_refactored + export CPL_WAVIC=HR1_refactored + ;; + *) + echo "FATAL ERROR Unrecognized resolution: ${CASE}" + exit 1 + ;; +esac + +echo "END: config.stage_ic" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.tracker b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.tracker new file mode 100644 index 0000000000..71fcf9196d --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.tracker @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.tracker ########## +echo "BEGIN: config.tracker" + +# Get task specific resources +. "${EXPDIR}/config.resources" tracker + +# Get tropcy settings +. "${EXPDIR}/config.tropcy" + +echo "END: config.tracker" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.tropcy b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.tropcy new file mode 100644 index 0000000000..718abe3be5 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.tropcy @@ -0,0 +1,15 @@ +#! 
/usr/bin/env bash + +########## config.tropcy ########## +echo "BEGIN: config.tropcy" + +# Tracker/genesis package location +export HOMEens_tracker=${BASE_GIT}/TC_tracker/${ens_tracker_ver} + +export SENDCOM="YES" # Needed by tracker scripts still + +export FHOUT_CYCLONE=6 +FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) +export FHMAX_CYCLONE + +echo "END: config.tropcy" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs new file mode 100644 index 0000000000..b03801876c --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs @@ -0,0 +1,519 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if (( $# <= 1 )); then + + echo "Must specify an input resolution argument to set variables!" + echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025" + echo "--gocart" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_gocart=true +skip_mediator=true + +# Loop through named arguments +while (( $# > 0 )); do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + shift + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + shift + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + shift + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + shift + ;; + "--gocart") + skip_gocart=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" 
+ exit 1 + ;; + esac + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION" | "HERCULES") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + *) + echo "FATAL ERROR: Unrecognized machine ${machine}" + exit 14 + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="40.0,1.77,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=6.0e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD + export 
WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=8 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=2 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.8e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + export layout_y_gfs=12 + #JKHexport layout_y_gfs=16 + export nthreads_fv3=4 + #JKHexport nthreads_fv3_gfs=4 + export nthreads_fv3_gfs=2 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2 + export WRITE_GROUP_GFS=1 + export 
WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=40 #Note this should be 10 for WCOSS2 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="1.67,8.8,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.35e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="0.625,14.1,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.13e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" 
+ exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; + *) + echo "FATAL ERROR: Unrecognized FV3 resolution ${fv3_res}" + exit 15 + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." +export cplwav2atm=".false." +export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1_c3_mynn" +model_list="atm" + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export cpl=".true." + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 + export CCPP_SUITE="FV3_GFS_v17_coupled_p8_ugwpv1" # TODO: Does this include FV3_GFS_v17_p8? Can this be used instead of FV3_GFS_v17_p8? +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + source "${EXPDIR}/config.ocn" + export cplflx=".true." 
+ model_list="${model_list}.ocean" + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + eps_imesh="4.0e-1" + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc" + MOM6_DIAG_MISVAL="0.0" + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.1deg.nc" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + eps_imesh="2.5e-1" + TOPOEDITS="ufs.topo_edits_011818.nc" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='True' + ;; + "050") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + eps_imesh="1.0e-1" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + eps_imesh="1.0e-1" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + 
MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export TOPOEDITS + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING + export eps_imesh + export MOM6_DIAG_COORD_DEF_Z_FILE + export MOM6_DIAG_MISVAL + export MOM6_ALLOW_LANDMASK_CHANGES +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + source "${EXPDIR}/config.ice" + export cplice=".true." + model_list="${model_list}.ice" + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + source "${EXPDIR}/config.wave" + export cplwav=".true." + export cplwav2atm=".true." 
+ model_list="${model_list}.wave" + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "glo_200") + ntasks_ww3=30 + nthreads_ww3=1 + ;; + "glo_500") + ntasks_ww3=12 + nthreads_ww3=1 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +# GOCART specific settings +if [[ "${skip_gocart}" == "false" ]]; then + source "${EXPDIR}/config.aero" + export cplchm=".true." + model_list="${model_list}.aero" +fi + +# Set the name of the UFS (previously nems) configure template to use +case "${model_list}" in + atm) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm.IN" + ;; + atm.aero) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm_aero.IN" + ;; + atm.wave) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.leapfrog_atm_wav.IN" + ;; + atm.ocean.ice) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld.IN" + ;; + atm.ocean.ice.aero) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero.IN" + ;; + atm.ocean.ice.wave) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_outerwave.IN" + ;; + atm.ocean.ice.wave.aero) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero_outerwave.IN" + ;; + *) + echo "FATAL ERROR: Unable to determine appropriate UFS configure template for ${model_list}" + exit 16 + ;; +esac + +if [[ ! -r "${ufs_configure_template}" ]]; then + echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable." 
+ exit 17 +fi + +unset model_list + +echo "END: config.ufs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs_c768_12x12_2th_1wg40wt b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs_c768_12x12_2th_1wg40wt new file mode 100644 index 0000000000..9b9c5ceb45 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs_c768_12x12_2th_1wg40wt @@ -0,0 +1,519 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if (( $# <= 1 )); then + + echo "Must specify an input resolution argument to set variables!" + echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025" + echo "--gocart" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_gocart=true +skip_mediator=true + +# Loop through named arguments +while (( $# > 0 )); do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + shift + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + shift + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + shift + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + shift + ;; + "--gocart") + skip_gocart=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" 
+ exit 1 + ;; + esac + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION" | "HERCULES") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + *) + echo "FATAL ERROR: Unrecognized machine ${machine}" + exit 14 + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="40.0,1.77,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=6.0e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD + export 
WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=8 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=2 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.8e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + export layout_y_gfs=12 + #JKHexport layout_y_gfs=16 + export nthreads_fv3=4 + #JKHexport nthreads_fv3_gfs=4 + export nthreads_fv3_gfs=2 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2 + export WRITE_GROUP_GFS=1 + export 
WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=40 #Note this should be 10 for WCOSS2 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="1.67,8.8,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.35e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="0.625,14.1,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.13e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" 
+ exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; + *) + echo "FATAL ERROR: Unrecognized FV3 resolution ${fv3_res}" + exit 15 + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." +export cplwav2atm=".false." +export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1" +model_list="atm" + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export cpl=".true." + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 + export CCPP_SUITE="FV3_GFS_v17_coupled_p8_ugwpv1" # TODO: Does this include FV3_GFS_v17_p8? Can this be used instead of FV3_GFS_v17_p8? +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + source "${EXPDIR}/config.ocn" + export cplflx=".true." 
+ model_list="${model_list}.ocean" + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + eps_imesh="4.0e-1" + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc" + MOM6_DIAG_MISVAL="0.0" + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.1deg.nc" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + eps_imesh="2.5e-1" + TOPOEDITS="ufs.topo_edits_011818.nc" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='True' + ;; + "050") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + eps_imesh="1.0e-1" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + eps_imesh="1.0e-1" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + 
MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export TOPOEDITS + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING + export eps_imesh + export MOM6_DIAG_COORD_DEF_Z_FILE + export MOM6_DIAG_MISVAL + export MOM6_ALLOW_LANDMASK_CHANGES +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + source "${EXPDIR}/config.ice" + export cplice=".true." + model_list="${model_list}.ice" + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + source "${EXPDIR}/config.wave" + export cplwav=".true." + export cplwav2atm=".true." 
+ model_list="${model_list}.wave" + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "glo_200") + ntasks_ww3=30 + nthreads_ww3=1 + ;; + "glo_500") + ntasks_ww3=12 + nthreads_ww3=1 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +# GOCART specific settings +if [[ "${skip_gocart}" == "false" ]]; then + source "${EXPDIR}/config.aero" + export cplchm=".true." + model_list="${model_list}.aero" +fi + +# Set the name of the UFS (previously nems) configure template to use +case "${model_list}" in + atm) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm.IN" + ;; + atm.aero) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm_aero.IN" + ;; + atm.wave) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.leapfrog_atm_wav.IN" + ;; + atm.ocean.ice) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld.IN" + ;; + atm.ocean.ice.aero) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero.IN" + ;; + atm.ocean.ice.wave) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_outerwave.IN" + ;; + atm.ocean.ice.wave.aero) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero_outerwave.IN" + ;; + *) + echo "FATAL ERROR: Unable to determine appropriate UFS configure template for ${model_list}" + exit 16 + ;; +esac + +if [[ ! -r "${ufs_configure_template}" ]]; then + echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable." 
+ exit 17 +fi + +unset model_list + +echo "END: config.ufs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs_c768_16x16_2th_2wg40wt b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs_c768_16x16_2th_2wg40wt new file mode 100644 index 0000000000..f5edcd06a3 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs_c768_16x16_2th_2wg40wt @@ -0,0 +1,519 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if (( $# <= 1 )); then + + echo "Must specify an input resolution argument to set variables!" + echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025" + echo "--gocart" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_gocart=true +skip_mediator=true + +# Loop through named arguments +while (( $# > 0 )); do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + shift + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + shift + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + shift + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + shift + ;; + "--gocart") + skip_gocart=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" 
+ exit 1 + ;; + esac + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION" | "HERCULES") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + *) + echo "FATAL ERROR: Unrecognized machine ${machine}" + exit 14 + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="40.0,1.77,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=6.0e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD + export 
WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=8 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=2 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.8e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + #JKHexport layout_x_gfs=12 + export layout_x_gfs=16 + export layout_y_gfs=16 + export nthreads_fv3=4 + #JKHexport nthreads_fv3_gfs=4 + export nthreads_fv3_gfs=2 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2 + export WRITE_GROUP_GFS=2 + export 
WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=40 #Note this should be 10 for WCOSS2 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="1.67,8.8,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.35e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="0.625,14.1,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.13e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" 
+ exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; + *) + echo "FATAL ERROR: Unrecognized FV3 resolution ${fv3_res}" + exit 15 + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." +export cplwav2atm=".false." +export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1" +model_list="atm" + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export cpl=".true." + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 + export CCPP_SUITE="FV3_GFS_v17_coupled_p8_ugwpv1" # TODO: Does this include FV3_GFS_v17_p8? Can this be used instead of FV3_GFS_v17_p8? +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + source "${EXPDIR}/config.ocn" + export cplflx=".true." 
+ model_list="${model_list}.ocean" + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + eps_imesh="4.0e-1" + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc" + MOM6_DIAG_MISVAL="0.0" + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.1deg.nc" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + eps_imesh="2.5e-1" + TOPOEDITS="ufs.topo_edits_011818.nc" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='True' + ;; + "050") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + eps_imesh="1.0e-1" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + eps_imesh="1.0e-1" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + 
MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export TOPOEDITS + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING + export eps_imesh + export MOM6_DIAG_COORD_DEF_Z_FILE + export MOM6_DIAG_MISVAL + export MOM6_ALLOW_LANDMASK_CHANGES +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + source "${EXPDIR}/config.ice" + export cplice=".true." + model_list="${model_list}.ice" + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + source "${EXPDIR}/config.wave" + export cplwav=".true." + export cplwav2atm=".true." 
+ model_list="${model_list}.wave" + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "glo_200") + ntasks_ww3=30 + nthreads_ww3=1 + ;; + "glo_500") + ntasks_ww3=12 + nthreads_ww3=1 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +# GOCART specific settings +if [[ "${skip_gocart}" == "false" ]]; then + source "${EXPDIR}/config.aero" + export cplchm=".true." + model_list="${model_list}.aero" +fi + +# Set the name of the UFS (previously nems) configure template to use +case "${model_list}" in + atm) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm.IN" + ;; + atm.aero) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm_aero.IN" + ;; + atm.wave) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.leapfrog_atm_wav.IN" + ;; + atm.ocean.ice) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld.IN" + ;; + atm.ocean.ice.aero) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero.IN" + ;; + atm.ocean.ice.wave) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_outerwave.IN" + ;; + atm.ocean.ice.wave.aero) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero_outerwave.IN" + ;; + *) + echo "FATAL ERROR: Unable to determine appropriate UFS configure template for ${model_list}" + exit 16 + ;; +esac + +if [[ ! -r "${ufs_configure_template}" ]]; then + echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable." 
+ exit 17 +fi + +unset model_list + +echo "END: config.ufs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.upp b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.upp new file mode 100644 index 0000000000..a1bd0a7d34 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.upp @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.upp ########## +# UPP specific + +echo "BEGIN: config.upp" + +# Get task specific resources +. "${EXPDIR}/config.resources" upp + +export UPP_CONFIG="${HOMEgfs}/parm/post/upp.yaml" + +# No. of forecast hours to process in a single job +export NFHRS_PER_GROUP=3 + +echo "END: config.upp" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.verfozn b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.verfozn new file mode 100644 index 0000000000..9eea0f25a3 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.verfozn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.verfozn ########## +echo "BEGIN: config.verfozn" + +# Get task specific resources +. "${EXPDIR}/config.resources" verfozn + +export DO_DATA_RPT=1 +export OZN_AREA="glb" +export OZNMON_SUFFIX=${NET} +export PARMmonitor=${PARMgfs}/monitor +export SATYPE_FILE=${PARMmonitor}/gdas_oznmon_satype.txt + +# Source the parm file +. "${PARMmonitor}/gdas_oznmon.parm" + +# Set up validation file +if [[ ${VALIDATE_DATA} -eq 1 ]]; then + export ozn_val_file=${PARMmonitor}/gdas_oznmon_base.tar +fi + +echo "END: config.verfozn" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.verfrad b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.verfrad new file mode 100644 index 0000000000..dd65020180 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.verfrad @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.verfrad ########## +echo "BEGIN: config.verfrad" + +# Get task specific resources +. "${EXPDIR}/config.resources" verfrad + +export PARMmonitor=${PARMgfs}/monitor +export satype_file=${PARMmonitor}/gdas_radmon_satype.txt + +# Source the parm file +. 
"${PARMmonitor}/da_mon.parm" + +# Other variables +export RAD_AREA="glb" +export MAKE_CTL=1 +export MAKE_DATA=1 +export USE_ANL=1 +export DO_DIAG_RPT=1 +export DO_DATA_RPT=1 + +export RADMON_SUFFIX=${RUN} +export CYCLE_INTERVAL=${assim_freq:-6} +export VERBOSE="YES" + +echo "END: config.verfrad" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.vminmon b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.vminmon new file mode 100644 index 0000000000..8929c36e0e --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.vminmon @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.vminmon ########## +echo "BEGIN: config.vminmon" + +# Get task specific resources +. "${EXPDIR}/config.resources" vminmon + +export MINMON_SUFFIX=${MINMON_SUFFIX:-${NET}} +export CYCLE_INTERVAL=${assim_freq:-6} + +export PARMmonitor=${PARMgfs}/monitor +export mm_gnormfile=${PARMmonitor}/${RUN}_minmon_gnorm.txt +export mm_costfile=${PARMmonitor}/${RUN}_minmon_cost.txt + +echo "END: config.vminmon" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wave b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wave new file mode 100644 index 0000000000..acb4c518ba --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wave @@ -0,0 +1,204 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="${HOMEgfs}/exec" +export FIXwave="${HOMEgfs}/fix/wave" +export PARMwave="${HOMEgfs}/parm/wave" +export USHwave="${HOMEgfs}/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' 
+ export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD='' + export wavepostGRD='gwes_30m' + export waveuoutpGRD=${waveGRD} + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD='' + export waveuoutpGRD=${waveGRD} + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD='' + export wavepostGRD='glo_025' + export waveuoutpGRD=${waveGRD} + ;; + "glo_200") + #Global regular lat/lon 2 deg grid + export waveinterpGRD='' + export wavepostGRD='glo_200' + export waveuoutpGRD=${waveGRD} + ;; + "glo_500") + #Global regular lat/lon 5 deg grid + export waveinterpGRD='' + export wavepostGRD='glo_500' + export waveuoutpGRD=${waveGRD} + ;; + *) + echo "FATAL ERROR: No grid specific wave config values exist for ${waveGRD}. Aborting." 
+ exit 1 + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [[ "${CDUMP}" = "gdas" ]]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=${FHMAX_GFS} +fi +export WAVHINDH=0 +export FHMIN_WAV=0 +export FHOUT_WAV=3 +export FHMAX_HF_WAV=120 +export FHOUT_HF_WAV=1 +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(( FHOUT_HF_WAV * 3600 )) +export DTPNT_WAV=3600 +export FHINCP_WAV=$(( DTPNT_WAV / 3600 )) + +# Selected output parameters (gridded) +export OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" + +# Restart file config +if [[ "${CDUMP}" = "gdas" ]]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=48 # RTOFS forecasts only out to 8 days +elif [[ ${gfs_cyc} -ne 0 ]]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=192 # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=192 # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [[ "${CDUMP}" != gfs ]]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) # TODO: This calculation needs to move to parsing_namelists_WW3.sh + if [[ ${rst_dt_gfs} -gt 0 ]]; then + export DT_1_RST_WAV=0 #${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + #temporarily set to zero to avoid a clash in requested restart times + #which makes the wave model crash a fix for the model 
issue will be coming + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [[ ${RUNMEM} = -1 ]]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB="${RUNMEM: -2}" +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ ${DO_ICE} == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ ${DO_OCN} == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in 
two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.waveawipsbulls b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.waveawipsbulls new file mode 100644 index 0000000000..65a8d5076b --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.waveawipsbulls @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. "${EXPDIR}/config.resources" waveawipsbulls + +export DBNROOT=/dev/null + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.waveawipsgridded b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.waveawipsgridded new file mode 100644 index 0000000000..bd7c7c11e4 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.waveawipsgridded @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. "${EXPDIR}/config.resources" waveawipsgridded + +export DBNROOT=/dev/null + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavegempak b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavegempak new file mode 100644 index 0000000000..bcbec91f07 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavegempak @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. "${EXPDIR}/config.resources" wavegempak + + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.waveinit b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.waveinit @@ -0,0 +1,14 @@ +#! 
/usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostbndpnt b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostbndpntbll b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostpnt b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostsbs b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostsbs new file mode 100644 index 0000000000..8e74aae069 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostsbs @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -n "${waveinterpGRD}" ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.waveprep b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. 
$EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/pygraf_global_ugwpv1_c3_mynn.xml b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/pygraf_global_ugwpv1_c3_mynn.xml new file mode 100644 index 0000000000..55a2fc711e --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/pygraf_global_ugwpv1_c3_mynn.xml @@ -0,0 +1,134 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + &SDATE; &EDATE; &INTERVAL; + + + + 0 6 12 18 24 30 36 42 48 54 60 66 72 78 84 90 96 102 108 114 120 + 000 006 012 018 024 030 036 042 048 054 060 066 072 078 084 090 096 102 108 114 120 + + + &JOBS_DIR;/remapgrib.ksh + &ACCOUNT; + 1 + 00:35:00 + remapgrib_#T#_&PSLOT; + &ROTDIR;/logs/@Y@m@d@H/remapgrib_#T#.log + ROTDIR&ROTDIR; + CDUMP&CDUMP; + COMPONENT&COMPONENT; + yyyymmdd@Y@m@d + hh@H + fcst#T# + GRID_NAMES201D130D242 + + &ROTDIR;/&CDUMP;.@Y@m@d/@H/products/&COMPONENT;/grib2/0p25/&CDUMP;.t@Hz.pgrb2.&RES;.f#T# + + + + + + + + full 242 130 201 + full,Africa,Beijing,Cambodia,EPacific,Europe,Taiwan,WAtlantic,WPacific AK,AKZoom,AKZoom2 CONUS,NC,NE,NW,SC,SE,SW NHemi + global.yml globalAK.yml globalCONUS.yml globalNHemi.yml + + + + + + source &PYGRAFDIR;/pre.sh; + cd &PYGRAFDIR;; + python &PYGRAFDIR;/create_graphics.py \ + maps \ + -d &ROTDIR;/&CDUMP;.@Y@m@d/@H/products/&COMPONENT;/grib2/0p25/post/#GRID_ID#\ + -f 0 &FCST_LENGTH; 6 \ + --file_type prs \ + --file_tmpl "&CDUMP;.t@Hz.pgrb2.0p25.f{FCST_TIME:03d}"\ + --images &PYGRAFDIR;/image_lists/#IMGFILE# 
hourly\ + -m "GFSv17p8_UGWPV1_C3_MYNN" \ + -n ${SLURM_CPUS_ON_NODE:-12} \ + -o &ROTDIR;/&CDUMP;.@Y@m@d/@H/products/&COMPONENT;/pyprd \ + -s @Y@m@d@H \ + --tiles "#TILESET#" \ + -z &ROTDIR;/&CDUMP;.@Y@m@d/@H/products/&COMPONENT;/img + + + &ACCOUNT; + &QUEUE; + &RESOURCES_PYTHON; + &WALLTIME_PYTHON; + --exclusive + FV3GFS_python_maps_#GRID_ID#_@H_ugwpv1_c3_mynn + &ROTDIR;/logs/@Y@m@d@H/python_@Y@m@d@H00_maps_#GRID_ID#_0-6-&FCST_LENGTH;.log + + + + + + + + + + + diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_pygraf_global_ugwpv1_c3_mynn.xml b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_pygraf_global_ugwpv1_c3_mynn.xml new file mode 100644 index 0000000000..195ad638a9 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_pygraf_global_ugwpv1_c3_mynn.xml @@ -0,0 +1,134 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + &SDATE; &EDATE; &INTERVAL; + + + + 0 6 12 18 24 30 36 42 48 54 60 66 72 78 84 90 96 102 108 114 120 + 000 006 012 018 024 030 036 042 048 054 060 066 072 078 084 090 096 102 108 114 120 + + + &JOBS_DIR;/remapgrib.ksh + &ACCOUNT; + 1 + 00:35:00 + remapgrib_#T#_&PSLOT; + &ROTDIR;/logs/@Y@m@d@H/remapgrib_#T#.log + ROTDIR&ROTDIR; + CDUMP&CDUMP; + COMPONENT&COMPONENT; + yyyymmdd@Y@m@d + hh@H + fcst#T# + GRID_NAMES201D130D242 + + &ROTDIR;/&CDUMP;.@Y@m@d/@H/products/&COMPONENT;/grib2/0p25/&CDUMP;.t@Hz.pgrb2.&RES;.f#T# + + + + + + + + full 242 130 201 + full,Africa,Beijing,Cambodia,EPacific,Europe,Taiwan,WAtlantic,WPacific AK,AKZoom,AKZoom2 CONUS,NC,NE,NW,SC,SE,SW NHemi + global.yml globalAK.yml globalCONUS.yml globalNHemi.yml + + + + + + source &PYGRAFDIR;/pre.sh; + cd &PYGRAFDIR;; + python &PYGRAFDIR;/create_graphics.py \ + maps \ + -d &ROTDIR;/&CDUMP;.@Y@m@d/@H/products/&COMPONENT;/grib2/0p25/post/#GRID_ID#\ + -f 0 &FCST_LENGTH; 6 \ + --file_type prs \ + --file_tmpl "&CDUMP;.t@Hz.pgrb2.0p25.f{FCST_TIME:03d}"\ + --images &PYGRAFDIR;/image_lists/#IMGFILE# hourly\ + -m 
"GFSv17p8_UGWPV1_C3_MYNN" \ + -n ${SLURM_CPUS_ON_NODE:-12} \ + -o &ROTDIR;/&CDUMP;.@Y@m@d/@H/products/&COMPONENT;/pyprd \ + -s @Y@m@d@H \ + --tiles "#TILESET#" \ + -z &ROTDIR;/&CDUMP;.@Y@m@d/@H/products/&COMPONENT;/img + + + &ACCOUNT; + &QUEUE; + &RESOURCES_PYTHON; + &WALLTIME_PYTHON; + --exclusive + FV3GFS_python_maps_#GRID_ID#_@H_ugwpv1_c3_mynn + &ROTDIR;/logs/@Y@m@d@H/python_@Y@m@d@H00_maps_#GRID_ID#_0-6-&FCST_LENGTH;.log + + + + + + + + + + + diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.crontab b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.crontab new file mode 100644 index 0000000000..428ce232aa --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.crontab @@ -0,0 +1,5 @@ + +#################### rt_v17p8_ugwpv1_c3_mynn #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.6/bin/rocotorun -d /home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.db -w /home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.xml +################################################################# diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.xml b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.xml new file mode 100644 index 0000000000..0a801597b5 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.xml @@ -0,0 +1,229 @@ + + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202403100000 202501290000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3 + batch + hera + 00:02:00 + 1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + 
&ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 04:50:00 + + 56:ppn=40:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f012 _f018-f030 _f036-f048 _f054-f066 _f072-f084 _f090-f102 _f108-f120 + f012 f030 f048 f066 f084 f102 f120 + f000_f006_f012 f018_f024_f030 f036_f042_f048 f054_f060_f066 f072_f078_f084 f090_f096_f102 f108_f114_f120 + + + + &JOBS_DIR;/atmos_products.sh + + &PSLOT;_gfsatmprod#grp#_@H + gsd-fv3 + batch + hera + 00:15:00 + 1:ppn=24:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsatmprod#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + FHRLST#lst# + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/master/gfs.t@Hz.master.grb2#dep# + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + gsd-fv3 + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1 + EXPDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn + ROTDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSrun/rt_v17p8_ugwpv1_c3_mynn + ATCFNAME&ATCFNAME; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + 
DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + + + + + diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.xml_gen b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.xml_gen new file mode 100644 index 0000000000..9a0c5f75c0 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.xml_gen @@ -0,0 +1,296 @@ + + + + + + + + + + + + +]> + + + + /home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSwfm/rt_v17p8_ugwpv1_c3/logs/@Y@m@d@H.log + + + 202401140000 202401140000 24:00:00 + + + + &JOBS_DIR;/stage_ic.sh + + &PSLOT;_gfsstage_ic_@H + gsd-fv3 + batch + hera + 00:15:00 + 1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsstage_ic.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1 + EXPDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSwfm/rt_v17p8_ugwpv1_c3 + ROTDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSrun/rt_v17p8_ugwpv1_c3 + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1/FV3GFSrun//RUNDIRS/rt_v17p8_ugwpv1_c3 + + + + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/HR2_refactored/@Y@m@d@H/atmos/gfs_ctrl.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/HR2_refactored/@Y@m@d@H/atmos/gfs_data.tile1.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/HR2_refactored/@Y@m@d@H/atmos/gfs_data.tile2.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/HR2_refactored/@Y@m@d@H/atmos/gfs_data.tile3.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/HR2_refactored/@Y@m@d@H/atmos/gfs_data.tile4.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/HR2_refactored/@Y@m@d@H/atmos/gfs_data.tile5.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/HR2_refactored/@Y@m@d@H/atmos/gfs_data.tile6.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/HR2_refactored/@Y@m@d@H/atmos/sfc_data.tile1.nc + 
/scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/HR2_refactored/@Y@m@d@H/atmos/sfc_data.tile2.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/HR2_refactored/@Y@m@d@H/atmos/sfc_data.tile3.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/HR2_refactored/@Y@m@d@H/atmos/sfc_data.tile4.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/HR2_refactored/@Y@m@d@H/atmos/sfc_data.tile5.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/HR2_refactored/@Y@m@d@H/atmos/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 06:00:00 + 56:ppn=40:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1 + EXPDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSwfm/rt_v17p8_ugwpv1_c3 + ROTDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSrun/rt_v17p8_ugwpv1_c3 + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1/FV3GFSrun//RUNDIRS/rt_v17p8_ugwpv1_c3 + + + + + + + + + + + + _f000-f012 _f018-f030 _f036-f048 _f054-f066 _f072-f084 _f090-f102 _f108-f120 + f012 f030 f048 f066 f084 f102 f120 + f000_f006_f012 f018_f024_f030 f036_f042_f048 f054_f060_f066 f072_f078_f084 f090_f096_f102 f108_f114_f120 + + + + &JOBS_DIR;/atmos_products.sh + + &PSLOT;_gfsatmprod#grp#_@H + gsd-fv3 + batch + hera + 00:15:00 + 1:ppn=24:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsatmprod#grp#.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1 + EXPDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSwfm/rt_v17p8_ugwpv1_c3 + ROTDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSrun/rt_v17p8_ugwpv1_c3 + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1/FV3GFSrun//RUNDIRS/rt_v17p8_ugwpv1_c3 + 
FHRLST#lst# + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/master/gfs.t@Hz.master.grb2#dep# + + + + + + + + + &JOBS_DIR;/tracker.sh + + &PSLOT;_gfstracker_@H + gsd-fv3 + batch + hera + 00:10:00 + 1:ppn=1:tpp=1 + 4G + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfstracker.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1 + EXPDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSwfm/rt_v17p8_ugwpv1_c3 + ROTDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSrun/rt_v17p8_ugwpv1_c3 + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1/FV3GFSrun//RUNDIRS/rt_v17p8_ugwpv1_c3 + + + + + + + + + + &JOBS_DIR;/genesis.sh + + &PSLOT;_gfsgenesis_@H + gsd-fv3 + batch + hera + 00:25:00 + 1:ppn=1:tpp=1 + 4G + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsgenesis.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1 + EXPDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSwfm/rt_v17p8_ugwpv1_c3 + ROTDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSrun/rt_v17p8_ugwpv1_c3 + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1/FV3GFSrun//RUNDIRS/rt_v17p8_ugwpv1_c3 + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + gsd-fv3 + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1 + EXPDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSwfm/rt_v17p8_ugwpv1_c3 + ROTDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSrun/rt_v17p8_ugwpv1_c3 + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1/FV3GFSrun//RUNDIRS/rt_v17p8_ugwpv1_c3 + + + + + + + + + + + + + + &JOBS_DIR;/cleanup.sh + + &PSLOT;_gfscleanup_@H + gsd-fv3 + 
batch + hera + 01:00:00 + 1:ppn=1:tpp=1 + 4096M + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfscleanup.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1 + EXPDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSwfm/rt_v17p8_ugwpv1_c3 + ROTDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSrun/rt_v17p8_ugwpv1_c3 + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1/FV3GFSrun//RUNDIRS/rt_v17p8_ugwpv1_c3 + + + + + + + + + + diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/runcmds b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/runcmds new file mode 100644 index 0000000000..e1451d66c3 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/runcmds @@ -0,0 +1,15 @@ +rocotorun -w ${HOME}/UFS-CAMsuite_dev1/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.xml -d ${HOME}/rt_dbfiles/rt_v17p8_ugwpv1_c3_mynn.db +rocotostat -w ${HOME}/UFS-CAMsuite_dev1/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.xml -d ${HOME}/rt_dbfiles/rt_v17p8_ugwpv1_c3_mynn.db -c `date --date='4 days ago' +%Y%m%d0000`: | m + +rocotorun -w ${HOME}/UFS-CAMsuite_dev1/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/v17p8_ugwpv1_c3_mynn.xml -d ${HOME}/retro_dbfiles/v17p8_ugwpv1_c3_mynn.db +rocotostat -w ${HOME}/UFS-CAMsuite_dev1/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/v17p8_ugwpv1_c3_mynn.xml -d ${HOME}/retro_dbfiles/v17p8_ugwpv1_c3_mynn.db + + + +PyGraf workflow +=============== +rocotorun -w ${HOME}/UFS-CAMsuite_dev1/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_pygraf_global_ugwpv1_c3_mynn.xml -d ${HOME}/rt_dbfiles/rt_pygraf_global_ugwpv1_c3_mynn.db +rocotostat -w ${HOME}/UFS-CAMsuite_dev1/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_pygraf_global_ugwpv1_c3_mynn.xml -d ${HOME}/rt_dbfiles/rt_pygraf_global_ugwpv1_c3_mynn.db -c `date --date='4 days ago' +%Y%m%d0000`: | m + +rocotorun -w pygraf_global_ugwpv1_c3_mynn.xml -d pygraf_global_ugwpv1_c3_mynn.db +rocotostat -w pygraf_global_ugwpv1_c3_mynn.xml -d 
pygraf_global_ugwpv1_c3_mynn.db diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/v17p8_ugwpv1_c3_mynn.xml b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/v17p8_ugwpv1_c3_mynn.xml new file mode 100644 index 0000000000..166a1645af --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/v17p8_ugwpv1_c3_mynn.xml @@ -0,0 +1,234 @@ + + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202403100000 202403150000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3 + batch + hera + 00:02:00 + 1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 04:50:00 + + 56:ppn=40:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f012 _f018-f030 _f036-f048 _f054-f066 _f072-f084 _f090-f102 _f108-f120 + f012 f030 f048 f066 f084 f102 f120 + f000_f006_f012 f018_f024_f030 f036_f042_f048 f054_f060_f066 f072_f078_f084 f090_f096_f102 f108_f114_f120 + + + + &JOBS_DIR;/atmos_products.sh + + &PSLOT;_gfsatmprod#grp#_@H + gsd-fv3 + batch + hera + 00:15:00 + 1:ppn=24:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsatmprod#grp#.log + 
+ RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + FHRLST#lst# + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/master/gfs.t@Hz.master.grb2#dep# + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + gsd-fv3 + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1 + EXPDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn + ROTDIR/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSrun/rt_v17p8_ugwpv1_c3_mynn + ATCFNAME&ATCFNAME; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + + + + + + diff --git a/INFO b/INFO new file mode 100644 index 0000000000..a332aebaf0 --- /dev/null +++ b/INFO @@ -0,0 +1,21 @@ +18Mar24 +======= +update config.fcst to correct dt_inner to 75 + effective 00Z 03/19 + +09Mar24 +======= +change from v17_p8_c3 to v17_p8_c3_mynn + effective 00Z 03/10 + +29Jan24 +======== +rsync gsl_ufs_rtdev1 from /scratch1/BMC/gsd-fv3-dev/jhender/test/gsl_ufs_rtdev1 excluding .git* files + (git clone --recursive) + + 02Jan24 global-workflow + UFS: 21Dec23, 991d652 + UPP: 07Nov23, 78f369b + + use submodules instead of checkout.sh + gfsatmos_products replaces gfspost tasks diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst index 33237fb6c7..a4c4ee8072 100644 --- a/parm/config/gfs/config.fcst +++ b/parm/config/gfs/config.fcst @@ -119,7 +119,7 @@ fi # PBL/turbulence schemes export hybedmf=".false." 
-if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" ]] ; then +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" ]] ; then export satmedmf=".false." export isatmedmf=0 export shal_cnv=".false." @@ -163,7 +163,7 @@ export iopt_trs="2" # Convection Options: 2-SASAS, 3-GF export progsigma=".true." -if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" ]] ; then +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" ]] ; then export imfdeepcnv=5 export imfshalcnv=-1 ## JKH - no shallow GF elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3" ]] ; then @@ -211,25 +211,27 @@ case ${imp_physics} in export ncld=2 export nwat=6 - if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" || - "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" ]] ; then - export ltaerosol=".true." - export FIELD_TABLE="$HOMEgfs/parm/ufs/fv3/field_table_thompson_aero_tke${tbp}" - else - export ltaerosol=".false." - export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_thompson_noaero_tke${tbp}" - fi - export cal_pre=".false." export random_clds=".false." export effr_in=".true." export lradar=".true." export ttendlim="-999" - export dt_inner=$((DELTIM/2)) export sedi_semi=.true. - if [[ "${sedi_semi}" == .true. ]]; then export dt_inner=${DELTIM} ; fi export decfl=10 + if [[ "${CCPP_SUITE}" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_c3_mynn" || + "${CCPP_SUITE}" == "FV3_GFS_v17_p8_thompson" ]] ; then + #JKH keep dt_inner $DELTIM/2 (75) if running aerosol-aware Thompson + export dt_inner=$((DELTIM/2)) + export ltaerosol=".true." 
+ export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_thompson_aero_tke${tbp}" + else + export dt_inner=$((DELTIM/2)) + if [[ "${sedi_semi}" == .true. ]]; then export dt_inner=${DELTIM} ; fi + export ltaerosol=".false." + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_thompson_noaero_tke${tbp}" + fi + export hord_mt_nh_nonmono=5 export hord_xx_nh_nonmono=5 export vtdm4_nh_nonmono=0.02 diff --git a/parm/config/gfs/config.fcst_dt-inner=150 b/parm/config/gfs/config.fcst_dt-inner=150 new file mode 100644 index 0000000000..54716573cc --- /dev/null +++ b/parm/config/gfs/config.fcst_dt-inner=150 @@ -0,0 +1,333 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case ${WAVE_CDUMP} in + both | "${CDUMP/enkf}" ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE}" +[[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}" +[[ "${DO_ICE}" == "YES" ]] && string="${string} --cice6 ${ICERES}" +[[ "${DO_WAVE}" == "YES" ]] && string="${string} --ww3 ${waveGRD// /;}" +[[ "${DO_AERO}" == "YES" ]] && string="${string} --gocart" +# We are counting on $string being multiple arguments +# shellcheck disable=SC2086 +source "${EXPDIR}/config.ufs" ${string} + + +# Get task specific resources +source "${EXPDIR}/config.resources" fcst +export domains_stack_size="16000000" + + +if [[ "${DONST}" == "YES" ]]; then + source "${EXPDIR}/config.nsst" +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### + +export FORECASTSH="${HOMEgfs}/scripts/exglobal_forecast.sh" +#export FORECASTSH="${HOMEgfs}/scripts/exglobal_forecast.py" # Temp. 
while this is worked on +export FCSTEXECDIR="${HOMEgfs}/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" +#JKHexport range_warn=".false." ## JKH + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if (( gwd_opt == 1 )); then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + launch_level=$(echo "${LEVS}/2.35" |bc) + export launch_level +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if (( gwd_opt == 2 )); then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=1 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".false." + export do_ugwp_v1=".true." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".true." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".true." + export do_ugwp_v1_orog_only=".false." + launch_level=$(echo "${LEVS}/2.35" |bc) + export launch_level + if [[ ${do_gsl_drag_ls_bl} == ".true." ]]; then + export cdmbgwd=${cdmbgwd_gsl} + fi +fi + +# Sponge layer settings +export tau=0. +export rf_cutoff=10. 
+export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if (( LEVS == 128 )) && [[ "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=".true." + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 + export lcnorm=".true." ## JKH +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "${satmedmf}" == ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "${progsigma}" == ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=-1 ## JKH - no shallow GF +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export progsigma=.false. 
+ export imfdeepcnv=5 + export imfshalcnv=5 +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +case ${imp_physics} in + 99) # ZhaoCarr + export ncld=1 + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + ;; + 6) # WSM6 + export ncld=2 + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_wsm6${tbf}${tbp}" + export nwat=6 + ;; + 8) # Thompson + export ncld=2 + export nwat=6 + + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" || + "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/ufs/fv3/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_thompson_noaero_tke${tbp}" + fi + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".true." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "${sedi_semi}" == .true. ]]; then export dt_inner=${DELTIM} ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + ;; + 11) # GFDL + export ncld=5 + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." 
+    export random_clds=".false."
+    export lgfdlmprad=".true."
+    export effr_in=".true."
+    export reiflag=2
+
+    export hord_mt_nh_nonmono=5
+    export hord_xx_nh_nonmono=5
+    export vtdm4_nh_nonmono=0.02
+    export nord=2
+    export d4_bg=0.12
+    export dddmp=0.1
+    ;;
+  *) echo "FATAL ERROR: Unknown microphysics option ${imp_physics}, ABORT!"; exit 1 ;;
+esac
+
+# Stochastic physics
+export DO_SPPT=${DO_SPPT:-"NO"}
+export DO_SKEB=${DO_SKEB:-"NO"}
+export DO_SHUM=${DO_SHUM:-"NO"}
+export DO_LAND_PERT=${DO_LAND_PERT:-"NO"}
+export DO_CA=${DO_CA:-"YES"}
+
+#coupling settings
+export cplmode="ufs.frac"
+if [[ "${FRAC_GRID:-".true."}" == ".false." ]]; then
+  export cplmode="ufs.nfrac"
+fi
+export psm_bc="1"
+
+export min_lakeice="0.15"
+export min_seaice=${min_seaice:-"0.15"}
+export use_cice_alb=${use_cice_alb:-".false."}
+
+export FSICL="0"
+export FSICS="0"
+
+#---------------------------------------------------------------------
+
+# ideflate: netcdf zlib lossless compression (0-9): 0 no compression
+# nbits: netcdf lossy compression level (0-32): 0 lossless
+export ideflate=1
+export nbits=14
+export ishuffle=0
+# compression for RESTART files written by FMS
+export shuffle=1
+export deflate_level=1
+
+#---------------------------------------------------------------------
+# Disable the use of coupler.res; get model start time from model_configure
+export USE_COUPLER_RES="NO"
+
+if [[ "${CDUMP}" =~ "gdas" ]] ; then # GDAS cycle specific parameters
+
+  # Variables used in DA cycling
+  export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table_da"
+
+  if [[ "${DOIAU}" == "YES" ]]; then
+    export restart_interval="3"
+  else
+    export restart_interval="6"
+  fi
+
+  # Turn on dry mass adjustment in GDAS
+  export adjust_dry_mass=".true."
+ +elif [[ "${CDUMP}" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval=${restart_interval_gfs:-12} + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "${CASE}" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/parm/config/gfs/config.ufs_c768_12x12_2th_1wg40wt b/parm/config/gfs/config.ufs_c768_12x12_2th_1wg40wt index 5fbc6bc651..b03801876c 100644 --- a/parm/config/gfs/config.ufs_c768_12x12_2th_1wg40wt +++ b/parm/config/gfs/config.ufs_c768_12x12_2th_1wg40wt @@ -276,7 +276,7 @@ export cplice=".false." export cplchm=".false." export cplwav=".false." export cplwav2atm=".false." -export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1_c3" +export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1_c3_mynn" model_list="atm" # Mediator specific settings diff --git a/parm/config/gfs/config.ufs_c768_16x16_2th_2wg40wt b/parm/config/gfs/config.ufs_c768_16x16_2th_2wg40wt index ab41ade0ca..506d8c4785 100644 --- a/parm/config/gfs/config.ufs_c768_16x16_2th_2wg40wt +++ b/parm/config/gfs/config.ufs_c768_16x16_2th_2wg40wt @@ -276,7 +276,7 @@ export cplice=".false." export cplchm=".false." export cplwav=".false." export cplwav2atm=".false." 
-export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1_c3" +export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1_c3_mynn" model_list="atm" # Mediator specific settings diff --git a/sorc/build_ufs.sh b/sorc/build_ufs.sh index 1907171223..2a63a1e678 100755 --- a/sorc/build_ufs.sh +++ b/sorc/build_ufs.sh @@ -5,7 +5,7 @@ cwd=$(pwd) # Default settings APP="S2SWA" -CCPP_SUITES="FV3_GFS_v17_p8_ugwpv1,FV3_GFS_v17_coupled_p8_ugwpv1,FV3_GFS_v17_p8_ugwpv1_mynn,FV3_GFS_v17_p8_ugwpv1_c3" # TODO: does the g-w need to build with all these CCPP_SUITES? +CCPP_SUITES="FV3_GFS_v17_p8_ugwpv1,FV3_GFS_v17_coupled_p8_ugwpv1,FV3_GFS_v17_p8_ugwpv1_mynn,FV3_GFS_v17_p8_ugwpv1_c3_mynn,FV3_GFS_v17_p8_ugwpv1_c3" # TODO: does the g-w need to build with all these CCPP_SUITES? while getopts ":da:j:v" option; do case "${option}" in