Skip to content

Commit

Permalink
Merge branch 'develop' at f8867d3 into feature/ufsda
Browse files Browse the repository at this point in the history
  • Loading branch information
RussTreadon-NOAA committed Feb 14, 2022
2 parents bf02216 + f8867d3 commit 29c9ce2
Show file tree
Hide file tree
Showing 16 changed files with 158 additions and 88 deletions.
10 changes: 7 additions & 3 deletions env/HERA.env
Original file line number Diff line number Diff line change
Expand Up @@ -130,14 +130,18 @@ elif [ $step = "eupd" ]; then

elif [ $step = "fcst" ]; then

#PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs
if [[ $CDUMP == "gfs" ]]; then
npe_fcst=$npe_fcst_gfs
npe_node_fcst=$npe_node_fcst_gfs
nth_fv3=$nth_fv3_gfs
fi

nth_max=$(($npe_node_max / $npe_node_fcst))

export NTHREADS_FV3=${nth_fv3:-$nth_max}
[[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max
export cores_per_node=$npe_node_max
if [[ $CDUMP == "gfs" ]]; then
npe_fcst=$npe_fcst_gfs
fi
export APRUN_FV3="$launcher -n $npe_fcst"

export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1}
Expand Down
7 changes: 7 additions & 0 deletions env/JET.env
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,13 @@ elif [ $step = "eupd" ]; then

elif [ $step = "fcst" ]; then

#PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs
if [[ $CDUMP == "gfs" ]]; then
npe_fcst=$npe_fcst_gfs
npe_node_fcst=$npe_node_fcst_gfs
nth_fv3=$nth_fv3_gfs
fi

nth_max=$(($npe_node_max / $npe_node_fcst))

export NTHREADS_FV3=${nth_fv3:-$nth_max}
Expand Down
10 changes: 7 additions & 3 deletions env/ORION.env
Original file line number Diff line number Diff line change
Expand Up @@ -139,14 +139,18 @@ elif [ $step = "eupd" ]; then

elif [ $step = "fcst" ]; then

#PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs
if [[ $CDUMP == "gfs" ]]; then
npe_fcst=$npe_fcst_gfs
npe_node_fcst=$npe_node_fcst_gfs
nth_fv3=$nth_fv3_gfs
fi

nth_max=$(($npe_node_max / $npe_node_fcst))

export NTHREADS_FV3=${nth_fv3:-$nth_max}
[[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max
export cores_per_node=$npe_node_max
if [[ $CDUMP == "gfs" ]]; then
npe_fcst=$npe_fcst_gfs
fi
export APRUN_FV3="$launcher -n $npe_fcst"

export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1}
Expand Down
13 changes: 8 additions & 5 deletions env/WCOSS_C.env
Original file line number Diff line number Diff line change
Expand Up @@ -115,17 +115,20 @@ elif [ $step = "eupd" ]; then

elif [ $step = "fcst" ]; then

#PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs
if [[ $CDUMP == "gfs" ]]; then
npe_fcst=$npe_fcst_gfs
npe_node_fcst=$npe_node_fcst_gfs
nth_fv3=$nth_fv3_gfs
fi

nth_max=$(($npe_node_max / $npe_node_fcst))

export NTHREADS_FV3=${nth_fv3:-$nth_max}
[[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max
export cores_per_node=$npe_node_max
#export APRUN_FV3="$launcher -j 1 -n ${npe_fv3:-$npe_fcst} -N $npe_node_fcst -d $NTHREADS_FV3 -cc depth"
if [ $CDUMP = "gdas" ]; then
export APRUN_FV3="$launcher -j 1 -n ${npe_fcst} -N $npe_node_fcst -d $NTHREADS_FV3 -cc depth"
else
export APRUN_FV3="$launcher -j 1 -n ${npe_fcst_gfs} -N $npe_node_fcst -d $NTHREADS_FV3 -cc depth"
fi
export APRUN_FV3="$launcher -j 1 -n ${npe_fcst} -N $npe_node_fcst -d $NTHREADS_FV3 -cc depth"

export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1}
[[ $NTHREADS_REGRID_NEMSIO -gt $nth_max ]] && export NTHREADS_REGRID_NEMSIO=$nth_max
Expand Down
15 changes: 9 additions & 6 deletions env/WCOSS_DELL_P3.env
Original file line number Diff line number Diff line change
Expand Up @@ -120,17 +120,20 @@ elif [ $step = "eupd" ]; then

elif [ $step = "fcst" ]; then

#PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs
if [[ $CDUMP == "gfs" ]]; then
npe_fcst=$npe_fcst_gfs
npe_node_fcst=$npe_node_fcst_gfs
nth_fv3=$nth_fv3_gfs
fi

nth_max=$(($npe_node_max / $npe_node_fcst))

export NTHREADS_FV3=${nth_fv3:-$nth_max}
[[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max
export cores_per_node=$npe_node_max
if [ $CDUMP = "gdas" ]; then
#export APRUN_FV3="$launcher ${npe_fv3:-${npe_fcst:-$PBS_NP}}"
export APRUN_FV3="$launcher ${npe_fcst:-$PBS_NP}"
else
export APRUN_FV3="$launcher ${npe_fcst_gfs:-$PBS_NP}"
fi
#export APRUN_FV3="$launcher ${npe_fv3:-${npe_fcst:-$PBS_NP}}"
export APRUN_FV3="$launcher ${npe_fcst:-$PBS_NP}"
export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1}
[[ $NTHREADS_REGRID_NEMSIO -gt $nth_max ]] && export NTHREADS_REGRID_NEMSIO=$nth_max
export APRUN_REGRID_NEMSIO="$launcher $LEVS"
Expand Down
20 changes: 14 additions & 6 deletions jobs/rocoto/arch.sh
Original file line number Diff line number Diff line change
Expand Up @@ -129,10 +129,18 @@ fi


###############################################################
# Archive data to HPSS
if [ $HPSSARCH = "YES" ]; then
# Archive data either to HPSS or locally
if [[ $HPSSARCH = "YES" || $LOCALARCH = "YES" ]]; then
###############################################################

# --set the archiving command and create local directories, if necessary
TARCMD="htar"
if [[ $LOCALARCH = "YES" ]]; then
TARCMD="tar"
[ ! -d $ATARDIR/$CDATE ] && mkdir -p $ATARDIR/$CDATE
[ ! -d $ATARDIR/$CDATE_MOS -a -d $ROTDIR/gfsmos.$PDY_MOS -a $cyc -eq 18 ] && mkdir -p $ATARDIR/$CDATE_MOS
fi

#--determine when to save ICs for warm start and forecast-only runs
SAVEWARMICA="NO"
SAVEWARMICB="NO"
Expand Down Expand Up @@ -208,10 +216,10 @@ if [ $CDUMP = "gfs" ]; then

#--save mdl gfsmos output from all cycles in the 18Z archive directory
if [ -d gfsmos.$PDY_MOS -a $cyc -eq 18 ]; then
htar -P -cvf $ATARDIR/$CDATE_MOS/gfsmos.tar ./gfsmos.$PDY_MOS
$TARCMD -P -cvf $ATARDIR/$CDATE_MOS/gfsmos.tar ./gfsmos.$PDY_MOS
status=$?
if [ $status -ne 0 -a $CDATE -ge $firstday ]; then
echo "HTAR $CDATE gfsmos.tar failed"
echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE gfsmos.tar failed"
exit $status
fi
fi
Expand Down Expand Up @@ -240,10 +248,10 @@ fi
# Turn on extended globbing options
shopt -s extglob
for targrp in $targrp_list; do
htar -P -cvf $ATARDIR/$CDATE/${targrp}.tar $(cat $ARCH_LIST/${targrp}.txt)
$TARCMD -P -cvf $ATARDIR/$CDATE/${targrp}.tar $(cat $ARCH_LIST/${targrp}.txt)
status=$?
if [ $status -ne 0 -a $CDATE -ge $firstday ]; then
echo "HTAR $CDATE ${targrp}.tar failed"
echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE ${targrp}.tar failed"
exit $status
fi
done
Expand Down
34 changes: 24 additions & 10 deletions jobs/rocoto/earc.sh
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,14 @@ cd $ROTDIR
###################################################################
# ENSGRP > 0 archives a group of ensemble members
firstday=$($NDATE +24 $SDATE)
if [[ $ENSGRP -gt 0 ]] && [[ $HPSSARCH = "YES" ]]; then
if [[ $ENSGRP -gt 0 ]] && [[ $HPSSARCH = "YES" || $LOCALARCH = "YES" ]]; then

#--set the archiving command and create local directories, if necessary
TARCMD="htar"
if [[ $LOCALARCH = "YES" ]]; then
TARCMD="tar"
[ ! -d $ATARDIR/$CDATE ] && mkdir -p $ATARDIR/$CDATE
fi

#--determine when to save ICs for warm start
SAVEWARMICA="NO"
Expand All @@ -84,27 +91,27 @@ if [[ $ENSGRP -gt 0 ]] && [[ $HPSSARCH = "YES" ]]; then

if [ $CDATE -gt $SDATE ]; then # Don't run for first half cycle

htar -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}_grp${ENSGRP}.tar $(cat $ARCH_LIST/enkf${CDUMP}_grp${n}.txt)
$TARCMD -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}_grp${ENSGRP}.tar $(cat $ARCH_LIST/enkf${CDUMP}_grp${n}.txt)
status=$?
if [ $status -ne 0 -a $CDATE -ge $firstday ]; then
echo "HTAR $CDATE enkf${CDUMP}_grp${ENSGRP}.tar failed"
echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE enkf${CDUMP}_grp${ENSGRP}.tar failed"
exit $status
fi

if [ $SAVEWARMICA = "YES" -a $cyc -eq $EARCINC_CYC ]; then
htar -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}_restarta_grp${ENSGRP}.tar $(cat $ARCH_LIST/enkf${CDUMP}_restarta_grp${n}.txt)
$TARCMD -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}_restarta_grp${ENSGRP}.tar $(cat $ARCH_LIST/enkf${CDUMP}_restarta_grp${n}.txt)
status=$?
if [ $status -ne 0 ]; then
echo "HTAR $CDATE enkf${CDUMP}_restarta_grp${ENSGRP}.tar failed"
echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE enkf${CDUMP}_restarta_grp${ENSGRP}.tar failed"
exit $status
fi
fi

if [ $SAVEWARMICB = "YES" -a $cyc -eq $EARCICS_CYC ]; then
htar -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}_restartb_grp${ENSGRP}.tar $(cat $ARCH_LIST/enkf${CDUMP}_restartb_grp${n}.txt)
$TARCMD -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}_restartb_grp${ENSGRP}.tar $(cat $ARCH_LIST/enkf${CDUMP}_restartb_grp${n}.txt)
status=$?
if [ $status -ne 0 ]; then
echo "HTAR $CDATE enkf${CDUMP}_restartb_grp${ENSGRP}.tar failed"
echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE enkf${CDUMP}_restartb_grp${ENSGRP}.tar failed"
exit $status
fi
fi
Expand All @@ -118,12 +125,19 @@ fi
# ENSGRP 0 archives ensemble means and copy data to online archive
if [ $ENSGRP -eq 0 ]; then

if [ $HPSSARCH = "YES" ]; then
if [[ $HPSSARCH = "YES" || $LOCALARCH = "YES" ]]; then

#--set the archiving command and create local directories, if necessary
TARCMD="htar"
if [[ $LOCALARCH = "YES" ]]; then
TARCMD="tar"
[ ! -d $ATARDIR/$CDATE ] && mkdir -p $ATARDIR/$CDATE
fi

htar -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}.tar $(cat $ARCH_LIST/enkf${CDUMP}.txt)
$TARCMD -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}.tar $(cat $ARCH_LIST/enkf${CDUMP}.txt)
status=$?
if [ $status -ne 0 -a $CDATE -ge $firstday ]; then
echo "HTAR $CDATE enkf${CDUMP}.tar failed"
echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE enkf${CDUMP}.tar failed"
exit $status
fi
fi
Expand Down
7 changes: 6 additions & 1 deletion parm/config/config.base.emc.dyn
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ export RUNDIR="$STMP/RUNDIRS/$PSLOT"
export DATAROOT="$RUNDIR/$CDATE/$CDUMP"
export ARCDIR="$NOSCRUB/archive/$PSLOT"
export ICSDIR="@ICSDIR@"
export ATARDIR="/NCEPDEV/$HPSS_PROJECT/1year/$USER/$machine/scratch/$PSLOT"
export ATARDIR="@ATARDIR@"

# Commonly defined parameters in JJOBS
export envir=${envir:-"prod"}
Expand Down Expand Up @@ -350,6 +350,11 @@ export DO_VSDB="YES" # Run VSDB package - set VSDB settings in config.vr

# Archiving options
export HPSSARCH="@HPSSARCH@" # save data to HPSS archive
export LOCALARCH="@LOCALARCH@" # save data to local archive
if [[ $HPSSARCH = "YES" ]] && [[ $LOCALARCH = "YES" ]]; then
echo "Both HPSS and local archiving selected. Please choose one or the other."
exit 2
fi
export ARCH_CYC=00 # Archive data at this cycle for warm_start capability
export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability
export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability
Expand Down
1 change: 0 additions & 1 deletion parm/config/config.fcst
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ echo "BEGIN: config.fcst"

# Source model specific information that is resolution dependent
. $EXPDIR/config.fv3 $CASE
[[ "$CDUMP" == "gfs" ]] && export nth_fv3=$nth_fv3_gfs

# Turn off waves if not used for this CDUMP
case $WAVE_CDUMP in
Expand Down
3 changes: 3 additions & 0 deletions parm/config/config.resources
Original file line number Diff line number Diff line change
Expand Up @@ -234,8 +234,11 @@ elif [ $step = "fcst" ]; then
NTASKS_TOT=$ATMPETS

export nth_fcst=${nth_fv3:-2}
export nth_fcst_gfs=${nth_fv3_gfs:-2}

export npe_node_fcst=$(echo "$npe_node_max / $nth_fcst" | bc)
export npe_node_fcst_gfs=$(echo "$npe_node_max / $nth_fcst_gfs" | bc)

if [[ "$machine" == "WCOSS_C" ]]; then export memory_fcst="1024M"; fi

if [[ $DO_WAVE == "YES" ]]; then
Expand Down
50 changes: 25 additions & 25 deletions sorc/link_workflow.sh
Original file line number Diff line number Diff line change
Expand Up @@ -204,41 +204,41 @@ cd ${pwd}/../fix ||exit 8
cd gdas
$LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas.v1.0.0/fix/gdas_minmon_cost.txt .
$LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas.v1.0.0/fix/gdas_minmon_gnorm.txt .
$LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon.v2.0.0/fix/gdas_oznmon_base.tar .
$LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon.v2.0.0/fix/gdas_oznmon_satype.txt .
$LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon.v3.0.0/fix/gdas_radmon_base.tar .
$LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon.v3.0.0/fix/gdas_radmon_satype.txt .
$LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon.v3.0.0/fix/gdas_radmon_scaninfo.txt .
$LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_base.tar .
$LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_satype.txt .
$LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_base.tar .
$LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_satype.txt .
$LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_scaninfo.txt .
cd ${pwd}/../jobs ||exit 8
$LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas.v1.0.0/jobs/JGDAS_ATMOS_VMINMON .
$LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gfs.v1.0.0/jobs/JGFS_ATMOS_VMINMON .
$LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon.v2.0.0/jobs/JGDAS_ATMOS_VERFOZN .
$LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon.v3.0.0/jobs/JGDAS_ATMOS_VERFRAD .
$LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/jobs/JGDAS_ATMOS_VERFOZN .
$LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/jobs/JGDAS_ATMOS_VERFRAD .
cd ${pwd}/../parm ||exit 8
[[ -d mon ]] && rm -rf mon
mkdir -p mon
cd mon
$LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon.v3.0.0/parm/gdas_radmon.parm da_mon.parm
$LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm da_mon.parm
# $LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas.v1.0.0/parm/gdas_minmon.parm .
# $LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gfs.v1.0.0/parm/gfs_minmon.parm .
$LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon.v2.0.0/parm/gdas_oznmon.parm .
# $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon.v3.0.0/parm/gdas_radmon.parm .
$LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/parm/gdas_oznmon.parm .
# $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm .
cd ${pwd}/../scripts ||exit 8
$LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas.v1.0.0/scripts/exgdas_atmos_vminmon.sh .
$LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gfs.v1.0.0/scripts/exgfs_atmos_vminmon.sh .
$LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon.v2.0.0/scripts/exgdas_atmos_verfozn.sh .
$LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon.v3.0.0/scripts/exgdas_atmos_verfrad.sh .
$LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/scripts/exgdas_atmos_verfozn.sh .
$LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/scripts/exgdas_atmos_verfrad.sh .
cd ${pwd}/../ush ||exit 8
$LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared.v1.0.1/ush/minmon_xtrct_costs.pl .
$LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared.v1.0.1/ush/minmon_xtrct_gnorms.pl .
$LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared.v1.0.1/ush/minmon_xtrct_reduct.pl .
$LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared.v2.0.0/ush/ozn_xtrct.sh .
$LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/ush/radmon_ck_stdout.sh .
$LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/ush/radmon_err_rpt.sh .
$LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/ush/radmon_verf_angle.sh .
$LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/ush/radmon_verf_bcoef.sh .
$LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/ush/radmon_verf_bcor.sh .
$LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/ush/radmon_verf_time.sh .
$LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared/ush/ozn_xtrct.sh .
$LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_ck_stdout.sh .
$LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_err_rpt.sh .
$LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_angle.sh .
$LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcoef.sh .
$LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcor.sh .
$LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_time.sh .


#------------------------------
Expand Down Expand Up @@ -340,22 +340,22 @@ cd ${pwd}/../sorc || exit 8
$SLINK gsi.fd/src/ncdiag ncdiag_cat.fd

[[ -d oznmon_horiz.fd ]] && rm -rf oznmon_horiz.fd
$SLINK gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared.v2.0.0/sorc/oznmon_horiz.fd oznmon_horiz.fd
$SLINK gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_horiz.fd oznmon_horiz.fd

[[ -d oznmon_time.fd ]] && rm -rf oznmon_time.fd
$SLINK gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared.v2.0.0/sorc/oznmon_time.fd oznmon_time.fd
$SLINK gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_time.fd oznmon_time.fd

[[ -d radmon_angle.fd ]] && rm -rf radmon_angle.fd
$SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/sorc/verf_radang.fd radmon_angle.fd
$SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radang.fd radmon_angle.fd

[[ -d radmon_bcoef.fd ]] && rm -rf radmon_bcoef.fd
$SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/sorc/verf_radbcoef.fd radmon_bcoef.fd
$SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcoef.fd radmon_bcoef.fd

[[ -d radmon_bcor.fd ]] && rm -rf radmon_bcor.fd
$SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/sorc/verf_radbcor.fd radmon_bcor.fd
$SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcor.fd radmon_bcor.fd

[[ -d radmon_time.fd ]] && rm -rf radmon_time.fd
$SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared.v3.0.0/sorc/verf_radtime.fd radmon_time.fd
$SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radtime.fd radmon_time.fd

[[ -d recentersigp.fd ]] && rm -rf recentersigp.fd
$SLINK gsi.fd/util/EnKF/gfs/src/recentersigp.fd recentersigp.fd
Expand Down
1 change: 0 additions & 1 deletion ush/forecast_predet.sh
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,6 @@ FV3_GFS_predet(){
#file and the value of npe_node_fcst is not correctly defined when using more than
#one thread and sets NTHREADS_FV3=1 even when the number of threads is appropriately >1
#NTHREADS_FV3=${NTHREADS_FV3:-${NTHREADS_FCST:-${nth_fv3:-1}}}
NTHREADS_FV3=${nth_fv3:-1}
cores_per_node=${cores_per_node:-${npe_node_fcst:-24}}
ntiles=${ntiles:-6}
if [ $MEMBER -lt 0 ]; then
Expand Down
Loading

0 comments on commit 29c9ce2

Please sign in to comment.