diff --git a/env/HERA.env b/env/HERA.env
index 11f4c082c6..e8efa3f8b3 100755
--- a/env/HERA.env
+++ b/env/HERA.env
@@ -130,14 +130,18 @@ elif [ $step = "eupd" ]; then
 
 elif [ $step = "fcst" ]; then
 
+    #PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs
+    if [[ $CDUMP == "gfs" ]]; then
+        npe_fcst=$npe_fcst_gfs
+        npe_node_fcst=$npe_node_fcst_gfs
+        nth_fv3=$nth_fv3_gfs
+    fi
+
     nth_max=$(($npe_node_max / $npe_node_fcst))
 
     export NTHREADS_FV3=${nth_fv3:-$nth_max}
     [[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max
     export cores_per_node=$npe_node_max
-    if [[ $CDUMP == "gfs" ]]; then
-        npe_fcst=$npe_fcst_gfs
-    fi
     export APRUN_FV3="$launcher -n $npe_fcst"
 
     export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1}
diff --git a/env/JET.env b/env/JET.env
index aa47a46530..113c2d1b37 100755
--- a/env/JET.env
+++ b/env/JET.env
@@ -99,6 +99,13 @@ elif [ $step = "eupd" ]; then
 
 elif [ $step = "fcst" ]; then
 
+    #PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs
+    if [[ $CDUMP == "gfs" ]]; then
+        npe_fcst=$npe_fcst_gfs
+        npe_node_fcst=$npe_node_fcst_gfs
+        nth_fv3=$nth_fv3_gfs
+    fi
+
     nth_max=$(($npe_node_max / $npe_node_fcst))
 
     export NTHREADS_FV3=${nth_fv3:-$nth_max}
diff --git a/env/ORION.env b/env/ORION.env
index c31646130a..4845e3fff6 100755
--- a/env/ORION.env
+++ b/env/ORION.env
@@ -139,14 +139,18 @@ elif [ $step = "eupd" ]; then
 
 elif [ $step = "fcst" ]; then
 
+    #PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs
+    if [[ $CDUMP == "gfs" ]]; then
+        npe_fcst=$npe_fcst_gfs
+        npe_node_fcst=$npe_node_fcst_gfs
+        nth_fv3=$nth_fv3_gfs
+    fi
+
     nth_max=$(($npe_node_max / $npe_node_fcst))
 
     export NTHREADS_FV3=${nth_fv3:-$nth_max}
     [[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max
     export cores_per_node=$npe_node_max
-    if [[ $CDUMP == "gfs" ]]; then
-        npe_fcst=$npe_fcst_gfs
-    fi
     export APRUN_FV3="$launcher -n $npe_fcst"
 
     export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1}
diff --git a/env/WCOSS_C.env b/env/WCOSS_C.env
index f79586a884..ea715a0ba6 100755
--- a/env/WCOSS_C.env
+++ b/env/WCOSS_C.env
@@ -115,17 +115,20 @@ elif [ $step = "eupd" ]; then
 
 elif [ $step = "fcst" ]; then
 
+    #PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs
+    if [[ $CDUMP == "gfs" ]]; then
+        npe_fcst=$npe_fcst_gfs
+        npe_node_fcst=$npe_node_fcst_gfs
+        nth_fv3=$nth_fv3_gfs
+    fi
+
     nth_max=$(($npe_node_max / $npe_node_fcst))
 
     export NTHREADS_FV3=${nth_fv3:-$nth_max}
     [[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max
     export cores_per_node=$npe_node_max
     #export APRUN_FV3="$launcher -j 1 -n ${npe_fv3:-$npe_fcst} -N $npe_node_fcst -d $NTHREADS_FV3 -cc depth"
-    if [ $CDUMP = "gdas" ]; then
-        export APRUN_FV3="$launcher -j 1 -n ${npe_fcst} -N $npe_node_fcst -d $NTHREADS_FV3 -cc depth"
-    else
-        export APRUN_FV3="$launcher -j 1 -n ${npe_fcst_gfs} -N $npe_node_fcst -d $NTHREADS_FV3 -cc depth"
-    fi
+    export APRUN_FV3="$launcher -j 1 -n ${npe_fcst} -N $npe_node_fcst -d $NTHREADS_FV3 -cc depth"
 
     export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1}
     [[ $NTHREADS_REGRID_NEMSIO -gt $nth_max ]] && export NTHREADS_REGRID_NEMSIO=$nth_max
diff --git a/env/WCOSS_DELL_P3.env b/env/WCOSS_DELL_P3.env
index 785a44f69a..b7761de2cc 100755
--- a/env/WCOSS_DELL_P3.env
+++ b/env/WCOSS_DELL_P3.env
@@ -120,17 +120,20 @@ elif [ $step = "eupd" ]; then
 
 elif [ $step = "fcst" ]; then
 
+    #PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs
+    if [[ $CDUMP == "gfs" ]]; then
+        npe_fcst=$npe_fcst_gfs
+        npe_node_fcst=$npe_node_fcst_gfs
+        nth_fv3=$nth_fv3_gfs
+    fi
+
     nth_max=$(($npe_node_max / $npe_node_fcst))
 
     export NTHREADS_FV3=${nth_fv3:-$nth_max}
     [[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max
     export cores_per_node=$npe_node_max
-    if [ $CDUMP = "gdas" ]; then
-        #export APRUN_FV3="$launcher ${npe_fv3:-${npe_fcst:-$PBS_NP}}"
-        export APRUN_FV3="$launcher ${npe_fcst:-$PBS_NP}"
-    else
-        export APRUN_FV3="$launcher ${npe_fcst_gfs:-$PBS_NP}"
-    fi
+    #export APRUN_FV3="$launcher ${npe_fv3:-${npe_fcst:-$PBS_NP}}"
+    export APRUN_FV3="$launcher ${npe_fcst:-$PBS_NP}"
 
     export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1}
     [[ $NTHREADS_REGRID_NEMSIO -gt $nth_max ]] && export NTHREADS_REGRID_NEMSIO=$nth_max
     export APRUN_REGRID_NEMSIO="$launcher $LEVS"
diff --git a/jobs/rocoto/arch.sh b/jobs/rocoto/arch.sh
index db60386f7a..7d17f12c87 100755
--- a/jobs/rocoto/arch.sh
+++ b/jobs/rocoto/arch.sh
@@ -129,10 +129,18 @@ fi
 
 ###############################################################
-# Archive data to HPSS
-if [ $HPSSARCH = "YES" ]; then
+# Archive data either to HPSS or locally
+if [[ $HPSSARCH = "YES" || $LOCALARCH = "YES" ]]; then
###############################################################
 
+# --set the archiving command and create local directories, if necessary
+TARCMD="htar"
+if [[ $LOCALARCH = "YES" ]]; then
+    TARCMD="tar"
+    [ ! -d $ATARDIR/$CDATE ] && mkdir -p $ATARDIR/$CDATE
+    [ ! -d $ATARDIR/$CDATE_MOS -a -d $ROTDIR/gfsmos.$PDY_MOS -a $cyc -eq 18 ] && mkdir -p $ATARDIR/$CDATE_MOS
+fi
+
 #--determine when to save ICs for warm start and forecast-only runs
 SAVEWARMICA="NO"
 SAVEWARMICB="NO"
@@ -208,10 +216,10 @@ if [ $CDUMP = "gfs" ]; then
 
     #--save mdl gfsmos output from all cycles in the 18Z archive directory
     if [ -d gfsmos.$PDY_MOS -a $cyc -eq 18 ]; then
-        htar -P -cvf $ATARDIR/$CDATE_MOS/gfsmos.tar ./gfsmos.$PDY_MOS
+        $TARCMD -P -cvf $ATARDIR/$CDATE_MOS/gfsmos.tar ./gfsmos.$PDY_MOS
         status=$?
         if [ $status -ne 0 -a $CDATE -ge $firstday ]; then
-            echo "HTAR $CDATE gfsmos.tar failed"
+            echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE gfsmos.tar failed"
             exit $status
         fi
     fi
@@ -240,10 +248,10 @@ fi
 
 # Turn on extended globbing options
 shopt -s extglob
 for targrp in $targrp_list; do
-    htar -P -cvf $ATARDIR/$CDATE/${targrp}.tar $(cat $ARCH_LIST/${targrp}.txt)
+    $TARCMD -P -cvf $ATARDIR/$CDATE/${targrp}.tar $(cat $ARCH_LIST/${targrp}.txt)
     status=$?
     if [ $status -ne 0 -a $CDATE -ge $firstday ]; then
-        echo "HTAR $CDATE ${targrp}.tar failed"
+        echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE ${targrp}.tar failed"
        exit $status
     fi
 done
diff --git a/jobs/rocoto/earc.sh b/jobs/rocoto/earc.sh
index 17337bff12..2a09c79e1f 100755
--- a/jobs/rocoto/earc.sh
+++ b/jobs/rocoto/earc.sh
@@ -60,7 +60,14 @@ cd $ROTDIR
 ###################################################################
 # ENSGRP > 0 archives a group of ensemble members
 firstday=$($NDATE +24 $SDATE)
-if [[ $ENSGRP -gt 0 ]] && [[ $HPSSARCH = "YES" ]]; then
+if [[ $ENSGRP -gt 0 ]] && [[ $HPSSARCH = "YES" || $LOCALARCH = "YES" ]]; then
+
+#--set the archiving command and create local directories, if necessary
+    TARCMD="htar"
+    if [[ $LOCALARCH = "YES" ]]; then
+        TARCMD="tar"
+        [ ! -d $ATARDIR/$CDATE ] && mkdir -p $ATARDIR/$CDATE
+    fi
 
 #--determine when to save ICs for warm start
 SAVEWARMICA="NO"
@@ -84,27 +91,27 @@ if [[ $ENSGRP -gt 0 ]] && [[ $HPSSARCH = "YES" ]]; then
 
     if [ $CDATE -gt $SDATE ]; then # Don't run for first half cycle
 
-        htar -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}_grp${ENSGRP}.tar $(cat $ARCH_LIST/enkf${CDUMP}_grp${n}.txt)
+        $TARCMD -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}_grp${ENSGRP}.tar $(cat $ARCH_LIST/enkf${CDUMP}_grp${n}.txt)
         status=$?
         if [ $status -ne 0 -a $CDATE -ge $firstday ]; then
-            echo "HTAR $CDATE enkf${CDUMP}_grp${ENSGRP}.tar failed"
+            echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE enkf${CDUMP}_grp${ENSGRP}.tar failed"
             exit $status
         fi
 
         if [ $SAVEWARMICA = "YES" -a $cyc -eq $EARCINC_CYC ]; then
-            htar -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}_restarta_grp${ENSGRP}.tar $(cat $ARCH_LIST/enkf${CDUMP}_restarta_grp${n}.txt)
+            $TARCMD -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}_restarta_grp${ENSGRP}.tar $(cat $ARCH_LIST/enkf${CDUMP}_restarta_grp${n}.txt)
             status=$?
             if [ $status -ne 0 ]; then
-                echo "HTAR $CDATE enkf${CDUMP}_restarta_grp${ENSGRP}.tar failed"
+                echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE enkf${CDUMP}_restarta_grp${ENSGRP}.tar failed"
                 exit $status
             fi
         fi
 
         if [ $SAVEWARMICB = "YES" -a $cyc -eq $EARCICS_CYC ]; then
-            htar -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}_restartb_grp${ENSGRP}.tar $(cat $ARCH_LIST/enkf${CDUMP}_restartb_grp${n}.txt)
+            $TARCMD -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}_restartb_grp${ENSGRP}.tar $(cat $ARCH_LIST/enkf${CDUMP}_restartb_grp${n}.txt)
             status=$?
             if [ $status -ne 0 ]; then
-                echo "HTAR $CDATE enkf${CDUMP}_restartb_grp${ENSGRP}.tar failed"
+                echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE enkf${CDUMP}_restartb_grp${ENSGRP}.tar failed"
                 exit $status
             fi
         fi
@@ -118,12 +125,19 @@ fi
 
 # ENSGRP 0 archives ensemble means and copy data to online archive
 if [ $ENSGRP -eq 0 ]; then
-    if [ $HPSSARCH = "YES" ]; then
+    if [[ $HPSSARCH = "YES" || $LOCALARCH = "YES" ]]; then
+
+#--set the archiving command and create local directories, if necessary
+        TARCMD="htar"
+        if [[ $LOCALARCH = "YES" ]]; then
+            TARCMD="tar"
+            [ ! -d $ATARDIR/$CDATE ] && mkdir -p $ATARDIR/$CDATE
+        fi
 
-        htar -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}.tar $(cat $ARCH_LIST/enkf${CDUMP}.txt)
+        $TARCMD -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}.tar $(cat $ARCH_LIST/enkf${CDUMP}.txt)
         status=$?
         if [ $status -ne 0 -a $CDATE -ge $firstday ]; then
-            echo "HTAR $CDATE enkf${CDUMP}.tar failed"
+            echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE enkf${CDUMP}.tar failed"
            exit $status
        fi
    fi
diff --git a/parm/config/config.base.emc.dyn b/parm/config/config.base.emc.dyn
index 5f8825c04a..7647fe40e0 100755
--- a/parm/config/config.base.emc.dyn
+++ b/parm/config/config.base.emc.dyn
@@ -115,7 +115,7 @@ export RUNDIR="$STMP/RUNDIRS/$PSLOT"
 export DATAROOT="$RUNDIR/$CDATE/$CDUMP"
 export ARCDIR="$NOSCRUB/archive/$PSLOT"
 export ICSDIR="@ICSDIR@"
-export ATARDIR="/NCEPDEV/$HPSS_PROJECT/1year/$USER/$machine/scratch/$PSLOT"
+export ATARDIR="@ATARDIR@"
 
 # Commonly defined parameters in JJOBS
 export envir=${envir:-"prod"}
@@ -350,6 +350,11 @@ export DO_VSDB="YES" # Run VSDB package - set VSDB settings in config.vr
 
 # Archiving options
 export HPSSARCH="@HPSSARCH@"        # save data to HPSS archive
+export LOCALARCH="@LOCALARCH@"      # save data to local archive
+if [[ $HPSSARCH = "YES" ]] && [[ $LOCALARCH = "YES" ]]; then
+    echo "Both HPSS and local archiving selected. Please choose one or the other."
+    exit 2
+fi
 export ARCH_CYC=00           # Archive data at this cycle for warm_start capability
 export ARCH_WARMICFREQ=4     # Archive frequency in days for warm_start capability
 export ARCH_FCSTICFREQ=1     # Archive frequency in days for gdas and gfs forecast-only capability
diff --git a/parm/config/config.fcst b/parm/config/config.fcst
index 1a246ba999..ba3b2bb3c6 100755
--- a/parm/config/config.fcst
+++ b/parm/config/config.fcst
@@ -9,7 +9,6 @@ echo "BEGIN: config.fcst"
 
 # Source model specific information that is resolution dependent
 . $EXPDIR/config.fv3 $CASE
-[[ "$CDUMP" == "gfs" ]] && export nth_fv3=$nth_fv3_gfs
 
 # Turn off waves if not used for this CDUMP
 case $WAVE_CDUMP in
diff --git a/parm/config/config.resources b/parm/config/config.resources
index 354e3cf5dd..5ae2c642e6 100755
--- a/parm/config/config.resources
+++ b/parm/config/config.resources
@@ -234,8 +234,11 @@ elif [ $step = "fcst" ]; then
     NTASKS_TOT=$ATMPETS
 
     export nth_fcst=${nth_fv3:-2}
+    export nth_fcst_gfs=${nth_fv3_gfs:-2}
 
     export npe_node_fcst=$(echo "$npe_node_max / $nth_fcst" | bc)
+    export npe_node_fcst_gfs=$(echo "$npe_node_max / $nth_fcst_gfs" | bc)
+
     if [[ "$machine" == "WCOSS_C" ]]; then export memory_fcst="1024M"; fi
 
     if [[ $DO_WAVE == "YES" ]]; then
diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh
index 39da6e8db9..b7910d6286 100755
--- a/sorc/link_workflow.sh
+++ b/sorc/link_workflow.sh
@@ -204,41 +204,41 @@ cd ${pwd}/../fix ||exit 8
 cd gdas
     $LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas.v1.0.0/fix/gdas_minmon_cost.txt .
     $LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas.v1.0.0/fix/gdas_minmon_gnorm.txt .
-    $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon.v2.0.0/fix/gdas_oznmon_base.tar .
-    $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon.v2.0.0/fix/gdas_oznmon_satype.txt .
-    $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon.v3.0.0/fix/gdas_radmon_base.tar .
-    $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon.v3.0.0/fix/gdas_radmon_satype.txt .
-    $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon.v3.0.0/fix/gdas_radmon_scaninfo.txt .
+    $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_base.tar .
+    $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_satype.txt .
+    $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_base.tar .
+    $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_satype.txt .
+    $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_scaninfo.txt .
 
 cd ${pwd}/../jobs ||exit 8
 
     $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas.v1.0.0/jobs/JGDAS_ATMOS_VMINMON .
     $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gfs.v1.0.0/jobs/JGFS_ATMOS_VMINMON .
-    $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon.v2.0.0/jobs/JGDAS_ATMOS_VERFOZN .
-    $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon.v3.0.0/jobs/JGDAS_ATMOS_VERFRAD .
+    $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/jobs/JGDAS_ATMOS_VERFOZN .
+    $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/jobs/JGDAS_ATMOS_VERFRAD .
 
 cd ${pwd}/../parm ||exit 8
     [[ -d mon ]] && rm -rf mon
     mkdir -p mon
     cd mon
-    $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon.v3.0.0/parm/gdas_radmon.parm da_mon.parm
+    $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm da_mon.parm
#    $LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas.v1.0.0/parm/gdas_minmon.parm .
#    $LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gfs.v1.0.0/parm/gfs_minmon.parm .
-    $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon.v2.0.0/parm/gdas_oznmon.parm .
-#   $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon.v3.0.0/parm/gdas_radmon.parm .
+    $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/parm/gdas_oznmon.parm .
+#   $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm .
 
 cd ${pwd}/../scripts ||exit 8
    $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas.v1.0.0/scripts/exgdas_atmos_vminmon.sh .
    $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gfs.v1.0.0/scripts/exgfs_atmos_vminmon.sh .
-    $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon.v2.0.0/scripts/exgdas_atmos_verfozn.sh .
-    $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon.v3.0.0/scripts/exgdas_atmos_verfrad.sh .
+    $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/scripts/exgdas_atmos_verfozn.sh .
+    $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/scripts/exgdas_atmos_verfrad.sh .
 
 cd ${pwd}/../ush ||exit 8
    $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared.v1.0.1/ush/minmon_xtrct_costs.pl .
    $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared.v1.0.1/ush/minmon_xtrct_gnorms.pl .
    $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared.v1.0.1/ush/minmon_xtrct_reduct.pl .
-    $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared.v2.0.0/ush/ozn_xtrct.sh .
-    $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/ush/radmon_ck_stdout.sh .
-    $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/ush/radmon_err_rpt.sh .
-    $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/ush/radmon_verf_angle.sh .
-    $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/ush/radmon_verf_bcoef.sh .
-    $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/ush/radmon_verf_bcor.sh .
-    $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/ush/radmon_verf_time.sh .
+    $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared/ush/ozn_xtrct.sh .
+    $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_ck_stdout.sh .
+    $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_err_rpt.sh .
+    $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_angle.sh .
+    $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcoef.sh .
+    $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcor.sh .
+    $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_time.sh .
 
 #------------------------------
@@ -340,22 +340,22 @@ cd ${pwd}/../sorc || exit 8
     $SLINK gsi.fd/src/ncdiag ncdiag_cat.fd
 
     [[ -d oznmon_horiz.fd ]] && rm -rf oznmon_horiz.fd
-    $SLINK gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared.v2.0.0/sorc/oznmon_horiz.fd oznmon_horiz.fd
+    $SLINK gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_horiz.fd oznmon_horiz.fd
 
     [[ -d oznmon_time.fd ]] && rm -rf oznmon_time.fd
-    $SLINK gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared.v2.0.0/sorc/oznmon_time.fd oznmon_time.fd
+    $SLINK gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_time.fd oznmon_time.fd
 
     [[ -d radmon_angle.fd ]] && rm -rf radmon_angle.fd
-    $SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/sorc/verf_radang.fd radmon_angle.fd
+    $SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radang.fd radmon_angle.fd
 
     [[ -d radmon_bcoef.fd ]] && rm -rf radmon_bcoef.fd
-    $SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/sorc/verf_radbcoef.fd radmon_bcoef.fd
+    $SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcoef.fd radmon_bcoef.fd
 
     [[ -d radmon_bcor.fd ]] && rm -rf radmon_bcor.fd
-    $SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/sorc/verf_radbcor.fd radmon_bcor.fd
+    $SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcor.fd radmon_bcor.fd
 
     [[ -d radmon_time.fd ]] && rm -rf radmon_time.fd
-    $SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/sorc/verf_radtime.fd radmon_time.fd
+    $SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radtime.fd radmon_time.fd
 
     [[ -d recentersigp.fd ]] && rm -rf recentersigp.fd
     $SLINK gsi.fd/util/EnKF/gfs/src/recentersigp.fd recentersigp.fd
diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh
index 786b759d4b..5b9015613b 100755
--- a/ush/forecast_predet.sh
+++ b/ush/forecast_predet.sh
@@ -122,7 +122,6 @@ FV3_GFS_predet(){
   #file and the value of npe_node_fcst is not correctly defined when using more than
   #one thread and sets NTHREADS_FV3=1 even when the number of threads is appropraitely >1
   #NTHREADS_FV3=${NTHREADS_FV3:-${NTHREADS_FCST:-${nth_fv3:-1}}}
-  NTHREADS_FV3=${nth_fv3:-1}
   cores_per_node=${cores_per_node:-${npe_node_fcst:-24}}
   ntiles=${ntiles:-6}
   if [ $MEMBER -lt 0 ]; then
diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh
index cac15a37d6..07a59ff5ba 100755
--- a/ush/hpssarch_gen.sh
+++ b/ush/hpssarch_gen.sh
@@ -106,12 +106,17 @@ if [ $type = "gfs" ]; then
     fi
     echo "${dirname}${head}pgrb2.0p25.anl " >>gfsa.txt
     echo "${dirname}${head}pgrb2.0p25.anl.idx " >>gfsa.txt
-    echo "${dirname}avno.t${cyc}z.cyclone.trackatcfunix " >>gfsa.txt
-    echo "${dirname}avnop.t${cyc}z.cyclone.trackatcfunix " >>gfsa.txt
-    echo "${dirname}trak.gfso.atcfunix.${PDY}${cyc} " >>gfsa.txt
-    echo "${dirname}trak.gfso.atcfunix.altg.${PDY}${cyc} " >>gfsa.txt
-    echo "${dirname}storms.gfso.atcf_gen.${PDY}${cyc} " >>gfsa.txt
-    echo "${dirname}storms.gfso.atcf_gen.altg.${PDY}${cyc} " >>gfsa.txt
+    #Only generated if there are cyclones to track
+    cyclone_files=(avno.t${cyc}z.cyclone.trackatcfunix
+                   avnop.t${cyc}z.cyclone.trackatcfunix
+                   trak.gfso.atcfunix.${PDY}${cyc}
+                   trak.gfso.atcfunix.altg.${PDY}${cyc}
+                   storms.gfso.atcf_gen.${PDY}${cyc}
+                   storms.gfso.atcf_gen.altg.${PDY}${cyc})
+
+    for file in ${cyclone_files[@]}; do
+        [[ -s $ROTDIR/${dirname}${file} ]] && echo "${dirname}${file}" >>gfsa.txt
+    done
 
     if [ $DO_DOWN = "YES" ]; then
       if [ $DO_BUFRSND = "YES" ]; then
@@ -451,10 +456,18 @@ if [ $type = "enkfgdas" -o $type = "enkfgfs" ]; then
       fi
     fi
   done # loop over FHR
 
-  for fstep in eobs eomg ecen esfc eupd efcs epos ; do
+  for fstep in eobs ecen esfc eupd efcs epos ; do
     echo "logs/${CDATE}/${CDUMP}${fstep}*.log " >>enkf${CDUMP}.txt
   done
+
+# eomg* are optional jobs
+  for log in $ROTDIR/logs/${CDATE}/${CDUMP}eomg*.log; do
+    if [ -s "$log" ]; then
+      echo "logs/${CDATE}/${CDUMP}eomg*.log " >>enkf${CDUMP}.txt
+    fi
+    break
+  done
 
 # Ensemble spread file only available with netcdf output
   fh=3
diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh
index eede235512..6249e1206e 100755
--- a/ush/parsing_namelists_FV3.sh
+++ b/ush/parsing_namelists_FV3.sh
@@ -86,16 +86,8 @@ cat > input.nml <<EOF
-cat >> input.nml << EOF
-  dz_min = ${dz_min:-"6"} ! no longer in develop branch
-  psm_bc = ${psm_bc:-"0"} ! no longer in develop branch
-EOF
-fi
-
-cat >> input.nml << EOF
+  dz_min = ${dz_min:-"6"}
+  psm_bc = ${psm_bc:-"0"}
   grid_type = -1
   make_nh = $make_nh
   fv_debug = ${fv_debug:-".false."}
@@ -323,17 +315,12 @@ cat >> input.nml <<EOF
 cat >> input.nml << EOF
   frac_grid = ${FRAC_GRID:-".true."}
   cplchm = ${cplchem:-".false."}
-  cplflx = $cplflx
-  cplice = ${cplice}
-  cplwav2atm = ${cplwav2atm}
+  cplflx = ${cplflx:-".false."}
+  cplice = ${cplice-".false."}
+  cplwav2atm = ${cplwav2atm-".false."}
 EOF
-fi
 
 # Add namelist for IAU
 if [ $DOIAU = "YES" ]; then
diff --git a/ush/rocoto/setup_expt.py b/ush/rocoto/setup_expt.py
index 3f1b086760..22da510648 100755
--- a/ush/rocoto/setup_expt.py
+++ b/ush/rocoto/setup_expt.py
@@ -135,11 +135,13 @@ def edit_baseconfig(host, inputs):
         "@QUEUE@": host.info["queue"],
         "@QUEUE_SERVICE@": host.info["queue_service"],
         "@PARTITION_BATCH@": host.info["partition_batch"],
-        "@EXP_WARM_START@": inputs.start,
+        "@EXP_WARM_START@": inputs.warm_start,
         "@MODE@": inputs.mode,
         "@CHGRP_RSTPROD@": host.info["chgrp_rstprod"],
         "@CHGRP_CMD@": host.info["chgrp_cmd"],
         "@HPSSARCH@": host.info["hpssarch"],
+        "@LOCALARCH@": host.info["localarch"],
+        "@ATARDIR@": host.info["atardir"],
         "@gfs_cyc@": inputs.gfs_cyc,
         "@APP@": inputs.app,
     }
@@ -234,6 +236,12 @@ def input_args():
 
     if args.app in ['S2S', 'S2SW'] and args.icsdir is None:
         raise SyntaxError("An IC directory must be specified with --icsdir when running the S2S or S2SW app")
 
+    # Add an entry for warm_start = .true. or .false.
+    if args.start == "warm":
+        args.warm_start = ".true."
+    else:
+        args.warm_start = ".false."
+
     return args
 
diff --git a/ush/rocoto/workflow_utils.py b/ush/rocoto/workflow_utils.py
index d9bb34d207..6ffd12888e 100755
--- a/ush/rocoto/workflow_utils.py
+++ b/ush/rocoto/workflow_utils.py
@@ -78,6 +78,8 @@ def wcoss_c(self):
                 'chgrp_rstprod': 'YES',
                 'chgrp_cmd': 'chgrp rstprod',
                 'hpssarch': 'YES',
+                'localarch': 'NO',
+                'atardir': '/NCEPDEV/$HPSS_PROJECT/1year/$USER/$machine/scratch/$PSLOT',
                 }
 
         return info
@@ -102,6 +104,8 @@ def wcoss_dell_p3(self):
                 'chgrp_rstprod': 'YES',
                 'chgrp_cmd': 'chgrp rstprod',
                 'hpssarch': 'YES',
+                'localarch': 'NO',
+                'atardir': '/NCEPDEV/$HPSS_PROJECT/1year/$USER/$machine/scratch/$PSLOT',
                 }
 
         return info
@@ -125,6 +129,8 @@ def wcoss_dell_p3p5(self):
                 'chgrp_rstprod': 'YES',
                 'chgrp_cmd': 'chgrp rstprod',
                 'hpssarch': 'YES',
+                'localarch': 'NO',
+                'atardir': '/NCEPDEV/$HPSS_PROJECT/1year/$USER/$machine/scratch/$PSLOT',
                 }
 
         return info
@@ -149,6 +155,8 @@ def hera(self):
                 'chgrp_rstprod': 'YES',
                 'chgrp_cmd': 'chgrp rstprod',
                 'hpssarch': 'YES',
+                'localarch': 'NO',
+                'atardir': '/NCEPDEV/$HPSS_PROJECT/1year/$USER/$machine/scratch/$PSLOT',
                 }
 
         return info
@@ -173,6 +181,8 @@ def orion(self):
                 'chgrp_rstprod': 'YES',
                 'chgrp_cmd': 'chgrp rstprod',
                 'hpssarch': 'NO',
+                'localarch': 'NO',
+                'atardir': '$NOSCRUB/archive_rotdir/$PSLOT',
                 }
 
         return info
@@ -403,7 +413,10 @@ def get_resources(machine, cfg, task, reservation, cdump='gdas'):
 
         ppn = cfg[f'npe_node_{ltask}']
 
         if machine in [ 'WCOSS_DELL_P3', 'HERA', 'ORION', 'JET' ]:
-            threads = cfg[f'nth_{ltask}']
+            if cdump in ['gfs'] and f'nth_{task}_gfs' in cfg.keys():
+                threads = cfg[f'nth_{ltask}_gfs']
+            else:
+                threads = cfg[f'nth_{ltask}']
 
         nodes = np.int(np.ceil(np.float(tasks) / np.float(ppn)))
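Reviewer note: the same `TARCMD` pattern now appears in `arch.sh` and in both `earc.sh` branches. Below is a minimal standalone sketch of that selection logic so it can be exercised outside the workflow; `HPSSARCH`, `LOCALARCH`, `ATARDIR`, `CDATE`, and the archived file are all made-up stand-ins, not values from a real experiment.

```bash
#!/bin/bash
# Sketch of the archiving-command selection added in this PR.
# All values below are illustrative; nothing is read from config.base.
HPSSARCH="NO"
LOCALARCH="YES"
ATARDIR="/tmp/$USER/archive/myexp"     # hypothetical local archive root
CDATE="2021032500"

echo "example payload" > example_file.txt

if [[ $HPSSARCH = "YES" || $LOCALARCH = "YES" ]]; then
    TARCMD="htar"                      # default: archive to HPSS
    if [[ $LOCALARCH = "YES" ]]; then
        TARCMD="tar"                   # local archiving uses plain tar
        [ ! -d $ATARDIR/$CDATE ] && mkdir -p $ATARDIR/$CDATE
    fi

    # Same invocation works for either command; the failure message
    # upper-cases whichever one was used (HTAR or TAR).
    $TARCMD -P -cvf $ATARDIR/$CDATE/example.tar ./example_file.txt
    status=$?
    if [ $status -ne 0 ]; then
        echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE example.tar failed"
        exit $status
    fi
fi
```

One detail worth noting: `htar -P` creates missing target directories on HPSS, while GNU tar's `-P` only affects absolute-path handling, which appears to be why the local branch pre-creates `$ATARDIR/$CDATE` with `mkdir -p`. The conflicting case of both switches set to YES is rejected in `config.base.emc.dyn`.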
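Similarly, a small sketch of the fcst-step resource selection the env files now share. The task counts, thread counts, and launcher below are invented example values, not anything taken from config.resources.

```bash
#!/bin/bash
# Sketch of the per-CDUMP PE/thread override added to the env files.
# Example numbers only: 40 cores/node, GDAS at 2 threads, GFS at 4 threads.
npe_node_max=40
npe_fcst=480;      npe_fcst_gfs=960
npe_node_fcst=20;  npe_node_fcst_gfs=10
nth_fv3=2;         nth_fv3_gfs=4
CDUMP="gfs"
launcher="srun"    # stand-in for the machine-specific launcher

# PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs
if [[ $CDUMP == "gfs" ]]; then
    npe_fcst=$npe_fcst_gfs
    npe_node_fcst=$npe_node_fcst_gfs
    nth_fv3=$nth_fv3_gfs
fi

nth_max=$(($npe_node_max / $npe_node_fcst))
export NTHREADS_FV3=${nth_fv3:-$nth_max}
[[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max
export APRUN_FV3="$launcher -n $npe_fcst"

echo "NTHREADS_FV3=$NTHREADS_FV3  APRUN_FV3=$APRUN_FV3"
# With the example numbers above this prints:
#   NTHREADS_FV3=4  APRUN_FV3=srun -n 960
```

This is why the old `[[ "$CDUMP" == "gfs" ]] && export nth_fv3=$nth_fv3_gfs` line in config.fcst and the gdas/gfs branches around `APRUN_FV3` could be dropped: the override now happens once, before `nth_max` is computed, so the thread cap and the launcher command stay consistent for both forecast types.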