From 22977c9aae7016e50362947fd6ef36385d736ce2 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Tue, 23 Aug 2022 11:04:08 -0400 Subject: [PATCH 1/7] Fix product generation at some fcst hrs (#988) 0.5 and 1.0 degree grib files were not being generated for most two-digit forecast hours due to a problem using a zero-padded number. Fixes #985 --- ush/fv3gfs_downstream_nems.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/fv3gfs_downstream_nems.sh b/ush/fv3gfs_downstream_nems.sh index 4ef17a9785..3b15b00cb2 100755 --- a/ush/fv3gfs_downstream_nems.sh +++ b/ush/fv3gfs_downstream_nems.sh @@ -78,7 +78,7 @@ else export paramlist=${paramlist:-$PARMpost/global_1x1_paramlist_g2} export paramlistb=${paramlistb:-$PARMpost/global_master-catchup_parmlist_g2} export fhr3=$(printf "%03d" ${FH}) - if (( fhr3%FHOUT_PGB == 0 )); then + if (( FH%FHOUT_PGB == 0 )); then export PGBS=YES fi fi From da164f8b00c68d1309759645a57f0df744d46ef2 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Tue, 23 Aug 2022 11:05:23 -0400 Subject: [PATCH 2/7] Add GSI monitor scripts (#969) As part of the overarching goal of moving global workflow scripts out of component repositories and under global workflow, all of the scripts currently linked in GSI monitor are added to global workflow. Scripts have been updated to reflect standardization already made to other workflow scripts (preamble, no backticks, etc.). See 7aa637 for those modifications specifically. Related GSI-Montitor issue: NOAA-EMC/GSI-Monitor/pull/23 Fixes #967 --- .gitignore | 19 -- jobs/JGDAS_ATMOS_VERFOZN | 110 +++++++ jobs/JGDAS_ATMOS_VERFRAD | 131 ++++++++ jobs/JGDAS_ATMOS_VMINMON | 95 ++++++ jobs/JGFS_ATMOS_VMINMON | 93 ++++++ scripts/exgdas_atmos_verfozn.sh | 91 +++++ scripts/exgdas_atmos_verfrad.sh | 222 +++++++++++++ scripts/exgdas_atmos_vminmon.sh | 119 +++++++ scripts/exgfs_atmos_vminmon.sh | 116 +++++++ sorc/link_workflow.sh | 21 -- ush/minmon_xtrct_costs.pl | 231 +++++++++++++ ush/minmon_xtrct_gnorms.pl | 444 +++++++++++++++++++++++++ ush/minmon_xtrct_reduct.pl | 89 +++++ ush/ozn_xtrct.sh | 266 +++++++++++++++ ush/radmon_diag_ck.sh | 175 ++++++++++ ush/radmon_err_rpt.sh | 194 +++++++++++ ush/radmon_verf_angle.sh | 234 +++++++++++++ ush/radmon_verf_bcoef.sh | 233 +++++++++++++ ush/radmon_verf_bcor.sh | 226 +++++++++++++ ush/radmon_verf_time.sh | 567 ++++++++++++++++++++++++++++++++ ush/rstprod.sh | 19 ++ 21 files changed, 3655 insertions(+), 40 deletions(-) create mode 100755 jobs/JGDAS_ATMOS_VERFOZN create mode 100755 jobs/JGDAS_ATMOS_VERFRAD create mode 100755 jobs/JGDAS_ATMOS_VMINMON create mode 100755 jobs/JGFS_ATMOS_VMINMON create mode 100755 scripts/exgdas_atmos_verfozn.sh create mode 100755 scripts/exgdas_atmos_verfrad.sh create mode 100755 scripts/exgdas_atmos_vminmon.sh create mode 100755 scripts/exgfs_atmos_vminmon.sh create mode 100755 ush/minmon_xtrct_costs.pl create mode 100755 ush/minmon_xtrct_gnorms.pl create mode 100755 ush/minmon_xtrct_reduct.pl create mode 100755 ush/ozn_xtrct.sh create mode 100755 ush/radmon_diag_ck.sh create mode 100755 ush/radmon_err_rpt.sh create mode 100755 ush/radmon_verf_angle.sh create mode 100755 ush/radmon_verf_bcoef.sh create mode 100755 ush/radmon_verf_bcor.sh create mode 100755 ush/radmon_verf_time.sh create mode 100755 ush/rstprod.sh diff --git a/.gitignore b/.gitignore index df54a09892..937b7bae1c 100644 --- a/.gitignore +++ b/.gitignore @@ -136,10 +136,6 @@ sorc/wafs_setmissing.fd #------------------------------ # jobs symlinks jobs/JGDAS_ATMOS_GLDAS 
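A note on the fhr3 change in PATCH 1/7 above: bash arithmetic treats a leading zero as an octal prefix, which is why the test on the zero-padded value failed for most two-digit forecast hours. A minimal sketch, assuming FHOUT_PGB=6 (the real value comes from the workflow configuration):

    # illustrative only, not part of the patch
    FH=18
    fhr3=$(printf "%03d" "${FH}")   # "018"
    echo $(( FH % 6 ))              # 0 -> PGBS=YES, as intended
    echo $(( fhr3 % 6 ))            # error: "018" is octal and 8 is not a valid octal digit
    # hours whose padded form has no 8 or 9 (e.g. "012") are silently read as octal,
    # so the modulo test gives the wrong answer and the 0.5/1.0 degree files are skipped

Testing the unpadded FH, as the patch does, sidesteps the octal interpretation entirely.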
-jobs/JGDAS_ATMOS_VERFOZN -jobs/JGDAS_ATMOS_VERFRAD -jobs/JGDAS_ATMOS_VMINMON -jobs/JGFS_ATMOS_VMINMON jobs/JGFS_ATMOS_WAFS jobs/JGFS_ATMOS_WAFS_BLENDING jobs/JGFS_ATMOS_WAFS_BLENDING_0P25 @@ -149,10 +145,6 @@ jobs/JGFS_ATMOS_WAFS_GRIB2_0P25 # scripts symlinks scripts/exemcsfc_global_sfc_prep.sh scripts/exgdas_atmos_gldas.sh -scripts/exgdas_atmos_verfozn.sh -scripts/exgdas_atmos_verfrad.sh -scripts/exgdas_atmos_vminmon.sh -scripts/exgfs_atmos_vminmon.sh scripts/exgfs_atmos_wafs_blending.sh scripts/exgfs_atmos_wafs_blending_0p25.sh scripts/exgfs_atmos_wafs_gcip.sh @@ -178,19 +170,8 @@ ush/global_chgres.sh ush/global_chgres_driver.sh ush/global_cycle.sh ush/global_cycle_driver.sh -ush/minmon_xtrct_costs.pl -ush/minmon_xtrct_gnorms.pl -ush/minmon_xtrct_reduct.pl ush/mkwfsgbl.sh -ush/ozn_xtrct.sh -ush/radmon_ck_stdout.sh -ush/radmon_err_rpt.sh -ush/radmon_verf_angle.sh -ush/radmon_verf_bcoef.sh -ush/radmon_verf_bcor.sh -ush/radmon_verf_time.sh ush/ufsda -ush/rstprod.sh ush/wafs_blending.sh ush/wafs_grib2.regrid.sh ush/wafs_intdsk.sh diff --git a/jobs/JGDAS_ATMOS_VERFOZN b/jobs/JGDAS_ATMOS_VERFOZN new file mode 100755 index 0000000000..aa1454440d --- /dev/null +++ b/jobs/JGDAS_ATMOS_VERFOZN @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +############################################################# +# Set up environment for GDAS Ozone Monitor job +############################################################# +source "$HOMEgfs/ush/preamble.sh" + +############################### +# Specify NET, RUN, and COMPONENT name +############################## +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export COMPONENT=${COMPONENT:-atmos} + +########################################################### +# obtain unique process id (pid) and make temp directories +########################################################### +export pid=$$ +export outid=${outid:-"LL$job"} +export DATA=${DATA:-${DATAROOT}/${jobid:?}} + +export OZNMON_SUFFIX=${OZNMON_SUFFIX:-${NET}} + +mkdir -p ${DATA} +cd ${DATA} + + +#################################### +# Determine Job Output Name on System +#################################### +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile +export cycle=t${cyc}z + + +#--------------------------------------------- +# Specify Execution Areas +# +export HOMEgfs_ozn=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} +export HOMEgdas_ozn=${HOMEgfs_ozn:-${NWROOT}/gfs.${gfs_ver}} +export PARMgdas_ozn=${PARMgfs_ozn:-$HOMEgfs_ozn/parm/mon} +export SCRgdas_ozn=${SCRgfs_ozn:-$HOMEgfs_ozn/scripts} +export FIXgdas_ozn=${FIXgfs_ozn:-$HOMEgfs_ozn/fix/gdas} + +export HOMEoznmon=${HOMEoznmon:-${HOMEgfs_ozn}} +export EXECoznmon=${EXECoznmon:-$HOMEoznmon/exec} +export FIXoznmon=${FIXoznmon:-${HOMEoznmon}/fix} +export USHoznmon=${USHoznmon:-$HOMEoznmon/ush} + + +#----------------------------------- +# source the parm file +# +. 
${PARMgdas_ozn}/gdas_oznmon.parm + + +############################################# +# determine PDY and cyc for previous cycle +############################################# + +cdate=$(${NDATE} -6 ${PDY}${cyc}) +echo 'pdate = ${pdate}' + +export P_PDY=$(echo ${cdate} | cut -c1-8) +export p_cyc=$(echo ${cdate} | cut -c9-10) + +#--------------------------------------------- +# OZN_TANKDIR - WHERE OUTPUT DATA WILL RESIDE +# +export OZN_TANKDIR=${OZN_TANKDIR:-$(compath.py ${envir}/${NET}/${gfs_ver})} +export TANKverf_ozn=${TANKverf_ozn:-${OZN_TANKDIR}/${RUN}.${PDY}/${cyc}/${COMPONENT}/oznmon} +export TANKverf_oznM1=${TANKverf_oznM1:-${OZN_TANKDIR}/${RUN}.${P_PDY}/${p_cyc}/${COMPONENT}/oznmon} +export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} +export COMIN=${COMIN:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}} + +if [[ ! -d ${TANKverf_ozn} ]]; then + mkdir -p -m 775 ${TANKverf_ozn} +fi + +#--------------------------------------- +# set up validation file +# +if [[ ${VALIDATE_DATA} -eq 1 ]]; then + export ozn_val_file=${ozn_val_file:-${FIXgdas_ozn}/gdas_oznmon_base.tar} +fi + +#--------------------------------------- +# Set necessary environment variables +# +export OZN_AREA=${OZN_AREA:-glb} +export oznstat=${oznstat:-$COMIN/gdas.t${cyc}z.oznstat} + + +#------------------------------------------------------- +# Execute the script. +# +${OZNMONSH:-${SCRgdas_ozn}/exgdas_atmos_verfozn.sh} ${PDY} ${cyc} +err=$? +[[ $err -ne 0 ]] && exit $err + + +################################ +# Remove the Working Directory +################################ +KEEPDATA=${KEEPDATA:-YES} +cd $DATAROOT +if [ ${KEEPDATA} = NO ] ; then + rm -rf $DATA +fi + diff --git a/jobs/JGDAS_ATMOS_VERFRAD b/jobs/JGDAS_ATMOS_VERFRAD new file mode 100755 index 0000000000..accf9f88f8 --- /dev/null +++ b/jobs/JGDAS_ATMOS_VERFRAD @@ -0,0 +1,131 @@ +#! 
/usr/bin/env bash + +############################################################# +# Set up environment for GDAS Radiance Monitor job +############################################################# +source "$HOMEgfs/ush/preamble.sh" + +############################### +# Specify NET, RUN, and COMPONENT name +############################## +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export COMPONENT=${COMPONENT:-atmos} + +########################################################### +# obtain unique process id (pid) and make temp directories +########################################################### +export pid=$$ +export outid=${outid:-"LL$job"} +export RAD_DATA_IN=${RAD_DATA_IN:-${DATAROOT}/${jobid:?}} + +export RADMON_SUFFIX=${RADMON_SUFFIX:-${RUN}} +export CYCLE_INTERVAL=${CYCLE_INTERVAL:-6} + +mkdir -p $RAD_DATA_IN +cd $RAD_DATA_IN + +#################################### +# Determine Job Output Name on System +#################################### +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile +export cycle=t${cyc}z + +############################################## +# Specify Execution Areas +############################################## +export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} +export SCRgfs=${SCRgfs:-$HOMEgfs/scripts} + +export FIXgdas=${FIXgdas:-$HOMEgfs/fix/gdas} +export PARMmon=${PARMmon:-$HOMEgfs/parm/mon} + +export HOMEradmon=${HOMEradmon:-${HOMEgfs}} +export EXECradmon=${EXECradmon:-$HOMEradmon/exec} +export FIXradmon=${FIXradmon:-${FIXgfs}} +export USHradmon=${USHradmon:-$HOMEradmon/ush} + + +################################### +# source the parm file +################################### +parm_file=${parm_file:-${PARMmon}/da_mon.parm} +. ${parm_file} + + +############################################# +# Run setpdy and initialize PDY variables +############################################# +if [[ $MY_MACHINE != "HERA" && $MY_MACHINE != "hera" ]]; then + setpdy.sh + . ./PDY +fi + +############################################# +# determine PDY and cyc for previous cycle +############################################# + +cdate=$(${NDATE} -6 ${PDY}${cyc}) +echo 'pdate = ${pdate}' + +export P_PDY=$(echo ${cdate} | cut -c1-8) +export p_cyc=$(echo ${cdate} | cut -c9-10) + +############################################# +# COMOUT - WHERE GSI OUTPUT RESIDES +# TANKverf - WHERE OUTPUT DATA WILL RESIDE +############################################# +export TANKverf=${TANKverf:-$(compath.py ${envir}/${NET}/${gfs_ver})} +export TANKverf_rad=${TANKverf_rad:-${TANKverf}/${RUN}.${PDY}/${cyc}/$COMPONENT/radmon} +export TANKverf_radM1=${TANKverf_radM1:-${TANKverf}/${RUN}.${P_PDY}/${p_cyc}/$COMPONENT/radmon} +export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} +export COMIN=${COMIN:-${COM_IN}/${RUN}.${PDY}/${cyc}/$COMPONENT} + +################################ +# backwards compatibility for +# gfs v15 which doesn't have +# a $COMPONENT in output path +################################ +if [[ ! -d ${COMIN} ]]; then + export COMIN=${COM_IN}/${RUN}.${PDY}/${cyc} +fi + + +mkdir -p -m 775 $TANKverf_rad + +######################################## +# Set necessary environment variables +######################################## +export RAD_AREA=${RAD_AREA:-glb} + +export biascr=${biascr:-$COMIN/gdas.t${cyc}z.abias} +export radstat=${radstat:-$COMIN/gdas.t${cyc}z.radstat} + +echo " " +echo "JOB HAS STARTED" +echo " " + + +######################################################## +# Execute the script. +${RADMONSH:-${SCRgfs}/exgdas_atmos_verfrad.sh} ${PDY} ${cyc} +err=$? 
+ +if [[ $err -ne 0 ]] ; then + exit $err +else + echo " " + echo "JOB HAS COMPLETED NORMALLY" + echo " " +fi + +################################ +# Remove the Working Directory +################################ +KEEPDATA=${KEEPDATA:-YES} +cd $DATAROOT +if [ ${KEEPDATA} = NO ] ; then + rm -rf $RAD_DATA_IN +fi + diff --git a/jobs/JGDAS_ATMOS_VMINMON b/jobs/JGDAS_ATMOS_VMINMON new file mode 100755 index 0000000000..fcd341e76e --- /dev/null +++ b/jobs/JGDAS_ATMOS_VMINMON @@ -0,0 +1,95 @@ +#! /usr/bin/env bash + +########################################################### +# GDAS Minimization Monitor (MinMon) job +########################################################### +source "$HOMEgfs/ush/preamble.sh" + +############################### +# Specify NET, RUN, and COMPONENT name +############################## +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export COMPONENT=${COMPONENT:-atmos} + +########################################################### +# obtain unique process id (pid) and make temp directories +########################################################### +export pid=$$ +export outid=${outid:-"LL$job"} + +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA + + +########################################################### +# obtain unique process id (pid) and make temp directories +########################################################### +export MINMON_SUFFIX=${MINMON_SUFFIX:-${NET}} +export m_job=${m_job:-${MINMON_SUFFIX}_mmDE} + + +############################################## +# Specify Package Areas +############################################## +export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} +export SCRgfs=${SCRgfs:-$HOMEgfs/scripts} + +export M_FIXgdas=${M_FIXgdas:-$HOMEgfs/fix/gdas} + +export HOMEminmon=${HOMEminmon:-${HOMEgfs}} +export EXECminmon=${EXECminmon:-$HOMEminmon/exec} +export USHminmon=${USHminmon:-$HOMEminmon/ush} + + +############################################# +# determine PDY and cyc for previous cycle +############################################# + +cdate=$(${NDATE} -6 ${PDY}${cyc}) +echo 'pdate = ${pdate}' + +export P_PDY=$(echo ${cdate} | cut -c1-8) +export p_cyc=$(echo ${cdate} | cut -c9-10) + + +############################################# +# TANKverf - WHERE OUTPUT DATA WILL RESIDE +############################################# +export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} + +export M_TANKverf=${M_TANKverf:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}/minmon} +export M_TANKverfM1=${M_TANKverfM1:-${COM_IN}/${RUN}.${P_PDY}/${p_cyc}/${COMPONENT}/minmon} + +export COMIN=${COMIN:-$COM_IN/${RUN}.${PDY}/${cyc}/$COMPONENT} + +mkdir -p -m 775 $M_TANKverf + + + +######################################## +# Set necessary environment variables +######################################## +export CYCLE_INTERVAL=6 +export gsistat=${gsistat:-${COMIN}/gdas.t${cyc}z.gsistat} + + +######################################################## +# Execute the script. +${GMONSH:-$SCRgfs/exgdas_atmos_vminmon.sh} ${PDY} ${cyc} +err=$? +[[ $err -ne 0 ]] && exit $err + + +################################ +# Remove the Working Directory +################################ +KEEPDATA=${KEEPDATA:-NO} +cd ${DATAROOT} + +if [ ${KEEPDATA} = NO ] ; then + rm -rf ${DATA} +fi + + diff --git a/jobs/JGFS_ATMOS_VMINMON b/jobs/JGFS_ATMOS_VMINMON new file mode 100755 index 0000000000..3087d0ecc3 --- /dev/null +++ b/jobs/JGFS_ATMOS_VMINMON @@ -0,0 +1,93 @@ +#! 
/usr/bin/env bash + +########################################################### +# GFS Minimization Monitor (MinMon) job +########################################################### +source "$HOMEgfs/ush/preamble.sh" + +############################### +# Specify NET and RUN name +############################## +export NET=${NET:-gfs} +export RUN=${RUN:-gfs} +export COMPONENT=${COMPONENT:-atmos} + + +########################################################### +# obtain unique process id (pid) and make temp directories +########################################################### +export pid=$$ +export outid=${outid:-"LL$job"} +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA + + +########################################################### +# obtain unique process id (pid) and make temp directories +########################################################### +export MINMON_SUFFIX=${MINMON_SUFFIX:-GFS} +export m_job=${m_job:-${MINMON_SUFFIX}_mmDE} + + +############################################## +# Specify Package Areas +############################################## +export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} +export SCRgfs=${SCRgfs:-$HOMEgfs/scripts} +export M_FIXgfs=${M_FIXgfs:-$HOMEgfs/fix/product} + +export HOMEminmon=${HOMEminmon:-${HOMEgfs}} +export EXECminmon=${EXECminmon:-$HOMEminmon/exec} +export USHminmon=${USHminmon:-$HOMEminmon/ush} + + +############################################# +# determine PDY and cyc for previous cycle +############################################# + +cdate=$(${NDATE} -6 ${PDY}${cyc}) +echo 'pdate = ${pdate}' + +export P_PDY=$(echo ${cdate} | cut -c1-8) +export p_cyc=$(echo ${cdate} | cut -c9-10) + + +############################################# +# TANKverf - WHERE OUTPUT DATA WILL RESIDE +############################################# +export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} + +M_TANKverf=${M_TANKverf:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}/minmon} +export M_TANKverfM1=${M_TANKverfM1:-${COM_IN}/${RUN}.${P_PDY}/${p_cyc}/${COMPONENT}/minmon} + +export COMIN=${COMIN:-$COM_IN/${RUN}.${PDY}/${cyc}/$COMPONENT} + +mkdir -p -m 775 $M_TANKverf + + +######################################## +# Set necessary environment variables +######################################## +export CYCLE_INTERVAL=6 +export gsistat=${gsistat:-${COMIN}/gfs.t${cyc}z.gsistat} + + +######################################################## +# Execute the script. +${GMONSH:-$SCRgfs/exgfs_atmos_vminmon.sh} ${PDY} ${cyc} +err=$? +[[ $err -ne 0 ]] && exit $err + + +################################ +# Remove the Working Directory +################################ +KEEPDATA=${KEEPDATA:-NO} +cd ${DATAROOT} + +if [ ${KEEPDATA} = NO ] ; then + rm -rf ${DATA} +fi + + diff --git a/scripts/exgdas_atmos_verfozn.sh b/scripts/exgdas_atmos_verfozn.sh new file mode 100755 index 0000000000..e9a1900085 --- /dev/null +++ b/scripts/exgdas_atmos_verfozn.sh @@ -0,0 +1,91 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +# exgdas_vrfyozn.sh +# +# This script runs the data extract/validation portion of the Ozone Monitor +# (OznMon) DA package. 
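# A worked example of the previous-cycle block shared by the four J-jobs above
# (JGDAS_ATMOS_VERFOZN, JGDAS_ATMOS_VERFRAD, JGDAS_ATMOS_VMINMON, JGFS_ATMOS_VMINMON).
# NDATE shifts a YYYYMMDDHH stamp by the given number of hours, so with the
# hypothetical values PDY=20220823 and cyc=00:
#   cdate=$(${NDATE} -6 ${PDY}${cyc})    # 2022082218
#   P_PDY=$(echo ${cdate} | cut -c1-8)   # 20220822  (previous day)
#   p_cyc=$(echo ${cdate} | cut -c9-10)  # 18        (previous cycle)
# Note that the single-quoted echo 'pdate = ${pdate}' in these jobs prints the
# literal string rather than a value; logging the variable that is actually set
# would look like: echo "cdate = ${cdate}"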
+# +################################################################################ +err=0 + +#------------------------------------------------------------------------------- +# Set environment +# +export RUN_ENVIR=${RUN_ENVIR:-nco} +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export envir=${envir:-prod} +export COMPONENT=${COMPONENT:-atmos} + +# Command line arguments +export PDY=${1:-${PDY:?}} +export cyc=${2:-${cyc:?}} + + +# Other variables +export SATYPE_FILE=${SATYPE_FILE:-$FIXgdas_ozn/gdas_oznmon_satype.txt} +export PDATE=${PDY}${cyc} +export DO_DATA_RPT=${DO_DATA_RPT:-1} +export NCP=${NCP:-/bin/cp} + + +#----------------------------------------------------------------- +# ensure work and TANK dirs exist, verify oznstat is available +# +export OZN_WORK_DIR=${OZN_WORK_DIR:-$(pwd)} + +if [[ ! -d ${OZN_WORK_DIR} ]]; then + mkdir $OZN_WORK_DIR +fi +cd $OZN_WORK_DIR + +if [[ ! -d ${TANKverf_ozn} ]]; then + mkdir -p $TANKverf_ozn +fi + +if [[ -s ${oznstat} ]]; then + echo ${oznstat} is available +fi + + + +data_available=0 + +if [[ -s ${oznstat} ]]; then + data_available=1 + + #------------------------------------------------------------------ + # Copy data files file to local data directory. + # Untar oznstat file. + #------------------------------------------------------------------ + + $NCP $oznstat ./oznstat.$PDATE + + tar -xvf oznstat.$PDATE + rm oznstat.$PDATE + + netcdf=0 + count=$(ls diag* | grep ".nc4" | wc -l) + if [ $count -gt 0 ] ; then + netcdf=1 + for filenc4 in $(ls diag*nc4.gz); do + file=$(echo $filenc4 | cut -d'.' -f1-2).gz + mv $filenc4 $file + done + fi + + export OZNMON_NETCDF=${netcdf} + + ${HOMEoznmon}/ush/ozn_xtrct.sh + err=$? + +else + # oznstat file not found + err=1 +fi + +exit ${err} + diff --git a/scripts/exgdas_atmos_verfrad.sh b/scripts/exgdas_atmos_verfrad.sh new file mode 100755 index 0000000000..3136ca5130 --- /dev/null +++ b/scripts/exgdas_atmos_verfrad.sh @@ -0,0 +1,222 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_vrfyrad.sh +# Script description: Runs data extract/validation for global radiance diag data +# +# Author: Ed Safford Org: NP23 Date: 2012-01-18 +# +# Abstract: This script runs the data extract/validation portion of the +# RadMon package. 
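# For reference, the NetCDF rename loop in exgdas_atmos_verfozn.sh above only strips
# the ".nc4" piece of a gzipped diag name while keeping the ".gz" suffix. With a
# hypothetical file name:
#   filenc4=diag_gome_metop-b_ges.2022082300.nc4.gz
#   file=$(echo $filenc4 | cut -d'.' -f1-2).gz   # diag_gome_metop-b_ges.2022082300.gz
# which matches the diag_<type>_<ptype>.<PDATE>.gz pattern that ozn_xtrct.sh later
# moves and gunzips, with OZNMON_NETCDF carrying the format information instead.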
+# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +################################################################################ + +export VERBOSE=${VERBOSE:-YES} + +export RUN_ENVIR=${RUN_ENVIR:-nco} +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export envir=${envir:-prod} +export COMPONENT=${COMPONENT:-atmos} + +# Command line arguments +export PDY=${1:-${PDY:?}} +export cyc=${2:-${cyc:?}} + +# Directories +export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} +export COMIN=${COMIN:-$COM_IN/${RUN}.${PDY}/${cyc}/$COMPONENT} + + +# Filenames +export biascr=${biascr:-$COMIN/gdas.t${cyc}z.abias} +export radstat=${radstat:-$COMIN/gdas.t${cyc}z.radstat} +export satype_file=${satype_file:-${FIXgdas}/gdas_radmon_satype.txt} + +# Other variables +export RAD_AREA=${RAD_AREA:-glb} +export MAKE_CTL=${MAKE_CTL:-1} +export MAKE_DATA=${MAKE_DATA:-1} +export USE_ANL=${USE_ANL:-1} +export PDATE=${PDY}${cyc} +export DO_DIAG_RPT=${DO_DIAG_RPT:-1} +export DO_DATA_RPT=${DO_DATA_RPT:-1} +export NCP=${NCP:-/bin/cp} + +########################################################################### +# ensure TANK dir exists, verify radstat and biascr are available +# +if [[ ! -d ${TANKverf_rad} ]]; then + mkdir -p $TANKverf_rad +fi + +if [[ "$VERBOSE" = "YES" ]]; then + if [[ -s ${radstat} ]]; then + echo ${radstat} is available + fi + if [[ -s ${biascr} ]]; then + echo ${biascr} is available + fi +fi +##################################################################### + +data_available=0 +if [[ -s ${radstat} && -s ${biascr} ]]; then + data_available=1 + + #------------------------------------------------------------------ + # Copy data files file to local data directory. + # Untar radstat file. + #------------------------------------------------------------------ + + $NCP $biascr ./biascr.$PDATE + $NCP $radstat ./radstat.$PDATE + + tar -xvf radstat.$PDATE + rm radstat.$PDATE + + #------------------------------------------------------------------ + # SATYPE is the list of expected satellite/instrument sources + # in the radstat file. It should be stored in the $TANKverf + # directory. If it isn't there then use the $FIXgdas copy. In all + # cases write it back out to the radmon.$PDY directory. Add any + # new sources to the list before writing back out. + #------------------------------------------------------------------ + + radstat_satype=$(ls d*ges* | awk -F_ '{ print $2 "_" $3 }') + if [[ "$VERBOSE" = "YES" ]]; then + echo $radstat_satype + fi + + echo satype_file = $satype_file + + #------------------------------------------------------------------ + # Get previous cycle's date, and look for the satype_file. Using + # the previous cycle will get us the previous day's directory if + # the cycle being processed is 00z. + #------------------------------------------------------------------ + if [[ $cyc = "00" ]]; then + use_tankdir=${TANKverf_radM1} + else + use_tankdir=${TANKverf_rad} + fi + + echo satype_file = $satype_file + export SATYPE=$(cat ${satype_file}) + + + #------------------------------------------------------------- + # Update the SATYPE if any new sat/instrument was + # found in $radstat_satype. Write the SATYPE contents back + # to $TANKverf/radmon.$PDY. 
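# As a concrete example of the extraction above: diag file names in the radstat tar
# follow diag_<sensor>_<satellite>_ges.<cycle>... (a hypothetical member would be
# diag_amsua_n19_ges.2022082300.nc4.gz), so splitting on underscores
#   ls d*ges* | awk -F_ '{ print $2 "_" $3 }'   # -> amsua_n19
# yields one sensor_satellite pair per file. Any pair present in the radstat but
# missing from the saved SATYPE list is appended to the working list here.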
+ #------------------------------------------------------------- + satype_changes=0 + new_satype=$SATYPE + for type in ${radstat_satype}; do + test=$(echo $SATYPE | grep $type | wc -l) + + if [[ $test -eq 0 ]]; then + if [[ "$VERBOSE" = "YES" ]]; then + echo "Found $type in radstat file but not in SATYPE list. Adding it now." + fi + satype_changes=1 + new_satype="$new_satype $type" + fi + done + + + #------------------------------------------------------------------ + # Rename the diag files and uncompress + #------------------------------------------------------------------ + netcdf=0 + + for type in ${SATYPE}; do + + if [[ netcdf -eq 0 && -e diag_${type}_ges.${PDATE}.nc4.${Z} ]]; then + netcdf=1 + fi + + if [[ -f "diag_${type}_ges.${PDATE}*.${Z}" ]]; then + mv diag_${type}_ges.${PDATE}*.${Z} ${type}.${Z} + ${UNCOMPRESS} ./${type}.${Z} + else + echo "WARNING: diag_${type}_ges.${PDATE}*.${Z} not available, skipping" + fi + + if [[ $USE_ANL -eq 1 ]]; then + if [[ -f "diag_${type}_anl.${PDATE}*.${Z}" ]]; then + mv diag_${type}_anl.${PDATE}*.${Z} ${type}_anl.${Z} + ${UNCOMPRESS} ./${type}_anl.${Z} + else + echo "WARNING: diag_${type}_anl.${PDATE}*.${Z} not available, skipping" + fi + fi + done + + export RADMON_NETCDF=$netcdf + + + #------------------------------------------------------------------ + # Run the child sccripts. + #------------------------------------------------------------------ + ${USHradmon}/radmon_verf_angle.sh ${PDATE} + rc_angle=$? + + ${USHradmon}/radmon_verf_bcoef.sh ${PDATE} + rc_bcoef=$? + + ${USHradmon}/radmon_verf_bcor.sh ${PDATE} + rc_bcor=$? + + ${USHradmon}/radmon_verf_time.sh ${PDATE} + rc_time=$? + + #-------------------------------------- + # optionally run clean_tankdir script + # + if [[ ${CLEAN_TANKVERF:-0} -eq 1 ]]; then + ${USHradmon}/clean_tankdir.sh glb 60 + rc_clean_tankdir=$? + echo "rc_clean_tankdir = $rc_clean_tankdir" + fi + +fi + + + +##################################################################### +# Postprocessing + +err=0 +if [[ ${data_available} -ne 1 ]]; then + err=1 +elif [[ $rc_angle -ne 0 ]]; then + err=$rc_angle +elif [[ $rc_bcoef -ne 0 ]]; then + err=$rc_bcoef +elif [[ $rc_bcor -ne 0 ]]; then + err=$rc_bcor +elif [[ $rc_time -ne 0 ]]; then + err=$rc_time +fi + +##################################################################### +# Restrict select sensors and satellites +export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} +rlist="saphir" +for rtype in $rlist; do + if compgen -G "$TANKverf_rad/*${rtype}*" > /dev/null; then + ${CHGRP_CMD} $TANKverf_rad/*${rtype}* + fi +done + +exit ${err} + diff --git a/scripts/exgdas_atmos_vminmon.sh b/scripts/exgdas_atmos_vminmon.sh new file mode 100755 index 0000000000..5d54174bf7 --- /dev/null +++ b/scripts/exgdas_atmos_vminmon.sh @@ -0,0 +1,119 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_vrfminmon.sh +# Script description: Runs data extract/validation for GSI normalization diag data +# +# Author: Ed Safford Org: NP23 Date: 2015-04-10 +# +# Abstract: This script runs the data extract/validation portion of the +# MinMon package. 
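# One caveat about the diag rename loop in exgdas_atmos_verfrad.sh above: bash does
# not perform pathname expansion inside [[ ]], so a quoted test such as
#   [[ -f "diag_${type}_ges.${PDATE}*.${Z}" ]]
# looks for a file whose name literally contains "*". A pattern-aware guard in the
# same style the script already uses for the rstprod files (a sketch, not the
# script's own code) would be:
#   if compgen -G "diag_${type}_ges.${PDATE}*.${Z}" > /dev/null; then
#     mv diag_${type}_ges.${PDATE}*.${Z} ${type}.${Z}
#     ${UNCOMPRESS} ./${type}.${Z}
#   fi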
+# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +################################################################################ + + +######################################## +# Set environment +######################################## +export RUN_ENVIR=${RUN_ENVIR:-nco} +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export envir=${envir:-prod} + +######################################## +# Command line arguments +######################################## +export PDY=${1:-${PDY:?}} +export cyc=${2:-${cyc:?}} + +######################################## +# Directories +######################################## +export DATA=${DATA:-$(pwd)} + + +######################################## +# Filenames +######################################## +gsistat=${gsistat:-$COMIN/gdas.t${cyc}z.gsistat} +export mm_gnormfile=${gnormfile:-${M_FIXgdas}/gdas_minmon_gnorm.txt} +export mm_costfile=${costfile:-${M_FIXgdas}/gdas_minmon_cost.txt} + +######################################## +# Other variables +######################################## +export MINMON_SUFFIX=${MINMON_SUFFIX:-GDAS} +export PDATE=${PDY}${cyc} +export NCP=${NCP:-/bin/cp} +export pgm=exgdas_vrfminmon.sh + +if [[ ! -d ${DATA} ]]; then + mkdir $DATA +fi +cd $DATA + +###################################################################### + +data_available=0 + +if [[ -s ${gsistat} ]]; then + + data_available=1 + + #----------------------------------------------------------------------- + # Copy the $MINMON_SUFFIX.gnorm_data.txt file to the working directory + # It's ok if it doesn't exist; we'll create a new one if needed. + # + # Note: The logic below is to accomodate two different data storage + # methods. Some parallels (and formerly ops) dump all MinMon data for + # a given day in the same directory (if condition). Ops now separates + # data into ${cyc} subdirectories (elif condition). + #----------------------------------------------------------------------- + if [[ -s ${M_TANKverf}/gnorm_data.txt ]]; then + $NCP ${M_TANKverf}/gnorm_data.txt gnorm_data.txt + elif [[ -s ${M_TANKverfM1}/gnorm_data.txt ]]; then + $NCP ${M_TANKverfM1}/gnorm_data.txt gnorm_data.txt + fi + + + #------------------------------------------------------------------ + # Run the child sccripts. + #------------------------------------------------------------------ + ${USHminmon}/minmon_xtrct_costs.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_costs=$? + echo "rc_costs = $rc_costs" + + ${USHminmon}/minmon_xtrct_gnorms.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_gnorms=$? + echo "rc_gnorms = $rc_gnorms" + + ${USHminmon}/minmon_xtrct_reduct.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_reduct=$? + echo "rc_reduct = $rc_reduct" + +fi + +##################################################################### +# Postprocessing + +err=0 +if [[ ${data_available} -ne 1 ]]; then + err=1 +elif [[ $rc_costs -ne 0 ]]; then + err=$rc_costs +elif [[ $rc_gnorms -ne 0 ]]; then + err=$rc_gnorms +elif [[ $rc_reduct -ne 0 ]]; then + err=$rc_reduct +fi + +exit ${err} + diff --git a/scripts/exgfs_atmos_vminmon.sh b/scripts/exgfs_atmos_vminmon.sh new file mode 100755 index 0000000000..4311878a03 --- /dev/null +++ b/scripts/exgfs_atmos_vminmon.sh @@ -0,0 +1,116 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . 
+# Script name: exgfs_vrfminmon.sh +# Script description: Runs data extract/validation for GSI normalization diag data +# +# Author: Ed Safford Org: NP23 Date: 2015-04-10 +# +# Abstract: This script runs the data extract/validation portion of the +# MinMon package. +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +################################################################################ + + +######################################## +# Set environment +######################################## +export RUN_ENVIR=${RUN_ENVIR:-nco} +export NET=${NET:-gfs} +export RUN=${RUN:-gfs} +export envir=${envir:-prod} + +######################################## +# Command line arguments +######################################## +export PDY=${1:-${PDY:?}} +export cyc=${2:-${cyc:?}} + +######################################## +# Directories +######################################## +export DATA=${DATA:-$(pwd)} + + +######################################## +# Filenames +######################################## +gsistat=${gsistat:-$COMIN/gfs.t${cyc}z.gsistat} +export mm_gnormfile=${gnormfile:-${M_FIXgfs}/gfs_minmon_gnorm.txt} +export mm_costfile=${costfile:-${M_FIXgfs}/gfs_minmon_cost.txt} + +######################################## +# Other variables +######################################## +export MINMON_SUFFIX=${MINMON_SUFFIX:-GFS} +export PDATE=${PDY}${cyc} +export NCP=${NCP:-/bin/cp} +export pgm=exgfs_vrfminmon.sh + + + +if [[ ! -d ${DATA} ]]; then + mkdir $DATA +fi +cd $DATA + +###################################################################### + +data_available=0 + +if [[ -s ${gsistat} ]]; then + + data_available=1 + + #------------------------------------------------------------------ + # Copy the $MINMON_SUFFIX.gnorm_data.txt file to the working directory + # It's ok if it doesn't exist; we'll create a new one if needed. + #------------------------------------------------------------------ + if [[ -s ${M_TANKverf}/gnorm_data.txt ]]; then + $NCP ${M_TANKverf}/gnorm_data.txt gnorm_data.txt + elif [[ -s ${M_TANKverfM1}/gnorm_data.txt ]]; then + $NCP ${M_TANKverfM1}/gnorm_data.txt gnorm_data.txt + fi + + + #------------------------------------------------------------------ + # Run the child sccripts. + #------------------------------------------------------------------ + ${USHminmon}/minmon_xtrct_costs.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_costs=$? + echo "rc_costs = $rc_costs" + + ${USHminmon}/minmon_xtrct_gnorms.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_gnorms=$? + echo "rc_gnorms = $rc_gnorms" + + ${USHminmon}/minmon_xtrct_reduct.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_reduct=$? + echo "rc_reduct = $rc_reduct" + +fi + +##################################################################### +# Postprocessing + +err=0 +if [[ ${data_available} -ne 1 ]]; then + err=1 +elif [[ $rc_costs -ne 0 ]]; then + err=$rc_costs +elif [[ $rc_gnorms -ne 0 ]]; then + err=$rc_gnorms +elif [[ $rc_reduct -ne 0 ]]; then + err=$rc_reduct +fi + +exit ${err} + diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index d3a78422a5..ff77012228 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -181,11 +181,6 @@ if [ -d ../sorc/gsi_monitor.fd ]; then $LINK ../../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_base.tar . $LINK ../../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_satype.txt . 
$LINK ../../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_scaninfo.txt . - cd ${pwd}/../jobs ||exit 8 - $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/jobs/JGDAS_ATMOS_VMINMON . - $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/jobs/JGFS_ATMOS_VMINMON . - $LINK ../sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/jobs/JGDAS_ATMOS_VERFOZN . - $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/jobs/JGDAS_ATMOS_VERFRAD . cd ${pwd}/../parm ||exit 8 [[ -d mon ]] && rm -rf mon mkdir -p mon @@ -195,22 +190,6 @@ if [ -d ../sorc/gsi_monitor.fd ]; then # $LINK ../../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/parm/gfs_minmon.parm . $LINK ../../sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/parm/gdas_oznmon.parm . # $LINK ../../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm . - cd ${pwd}/../scripts ||exit 8 - $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/scripts/exgdas_atmos_vminmon.sh . - $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/scripts/exgfs_atmos_vminmon.sh . - $LINK ../sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/scripts/exgdas_atmos_verfozn.sh . - $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/scripts/exgdas_atmos_verfrad.sh . - cd ${pwd}/../ush ||exit 8 - $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_costs.pl . - $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_gnorms.pl . - $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_reduct.pl . - $LINK ../sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/ush/ozn_xtrct.sh . - $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_err_rpt.sh . - $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_angle.sh . - $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcoef.sh . - $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcor.sh . - $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_time.sh . - $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/rstprod.sh . fi #------------------------------ diff --git a/ush/minmon_xtrct_costs.pl b/ush/minmon_xtrct_costs.pl new file mode 100755 index 0000000000..502032da80 --- /dev/null +++ b/ush/minmon_xtrct_costs.pl @@ -0,0 +1,231 @@ +#!/usr/bin/env perl + +#--------------------------------------------------------------------------- +# minmon_xtrct_costs.pl +# +# Extract cost data from gsistat file and load into cost +# and cost term files. 
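# For orientation, the ex-scripts above invoke this as
#   minmon_xtrct_costs.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy
# e.g. (hypothetical values) minmon_xtrct_costs.pl GDAS 20220823 00 gdas.t00z.gsistat dummy
# The $mm_costfile it reads is a plain "keyword:value" list supplying cost_target,
# cost_number and costterms_target, and the output ${cdate}.costs.txt holds one
# comma-separated row per iteration,
#   <iteration>,<total cost J>,<Jb>,<Jo>,<Jc>,<Jl>
# in %e notation, with 0.00 placeholders when no cost-term line is found.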
+#--------------------------------------------------------------------------- + +use strict; +use warnings; + +#---------------------------------------------- +# subroutine to trim white space from strings +#---------------------------------------------- +sub trim { my $s = shift; $s =~ s/^\s+|\s+$//g; return $s }; + + +#--------------------------- +# +# Main routine begins here +# +#--------------------------- + +if ($#ARGV != 4 ) { + print "usage: minmon_xtrct_costs.pl SUFFIX PDY cyc infile jlogfile\n"; + exit; +} +my $suffix = $ARGV[0]; + +my $pdy = $ARGV[1]; +my $cyc = $ARGV[2]; +my $infile = $ARGV[3]; +my $jlogfile = $ARGV[4]; + +my $use_costterms = 0; +my $no_data = 0.00; + +my $scr = "minmon_xtrct_costs.pl"; +print "$scr has started\n"; + + +my $rc = 0; +my $cdate = sprintf '%s%s', $pdy, $cyc; + +if( (-e $infile) ) { + + my $found_cost = 0; + my $found_costterms = 0; + my @cost_array; + my @jb_array; + my @jo_array; + my @jc_array; + my @jl_array; + my @term_array; + my @all_cost_terms; + + my $cost_target; + my $cost_number; + my $costterms_target; + my $jb_number = 5; + my $jo_number = 6; + my $jc_number = 7; + my $jl_number = 8; + + my $costfile = $ENV{"mm_costfile"}; + + if( (-e $costfile) ) { + open( COSTFILE, "<${costfile}" ) or die "Can't open ${costfile}: $!\n"; + my $line; + + while( $line = ) { + if( $line =~ /cost_target/ ) { + my @termsline = split( /:/, $line ); + $cost_target = $termsline[1]; + } elsif( $line =~ /cost_number/ ) { + my @termsline = split( /:/, $line ); + $cost_number = $termsline[1]; + } elsif( $line =~ /costterms_target/ ){ + my @termsline = split( /:/, $line ); + $costterms_target = $termsline[1]; + } + } + close( COSTFILE ); + } else { + $rc = 2; + } + + #------------------------------------------------------------------------ + # Open the infile and search for the $costterms_target and $cost_target + # strings. If found, parse out the cost information and push into + # holding arrays. + #------------------------------------------------------------------------ + if( $rc == 0 ) { + open( INFILE, "<${infile}" ) or die "Can't open ${infile}: $!\n"; + + my $line; + my $term_ctr=0; + + while( $line = ) { + + if( $line =~ /$costterms_target/ ) { + my @termsline = split( / +/, $line ); + push( @jb_array, $termsline[$jb_number] ); + push( @jo_array, $termsline[$jo_number] ); + push( @jc_array, $termsline[$jc_number] ); + push( @jl_array, $termsline[$jl_number] ); + $use_costterms = 1; + } + + if( $line =~ /$cost_target/ ) { + my @costline = split( / +/, $line ); + push( @cost_array, $costline[$cost_number] ); + } + + if( $term_ctr > 0 ) { + my @termline = split( / +/, $line ); + + if ( $term_ctr < 10 ) { + push( @term_array, trim($termline[1]) ); + push( @term_array, trim($termline[2]) ); + push( @term_array, trim($termline[3]) ); + $term_ctr++; + } else { + push( @term_array, trim($termline[1]) ); + push( @term_array, trim($termline[2]) ); + $term_ctr = 0; + } + + }elsif ( $line =~ "J=" && $line !~ "EJ=" ) { + my @termline = split( / +/, $line ); + push( @term_array, trim($termline[2]) ); + push( @term_array, trim($termline[3]) ); + push( @term_array, trim($termline[4]) ); + $term_ctr = 1; + } + } + + close( INFILE ); + + + #---------------------------------------------- + # move cost_array into all_costs by iteration + #---------------------------------------------- + my @all_costs; + for my $i (0 .. 
$#cost_array) { + my $iterline; + if( $use_costterms == 1 ){ + $iterline = sprintf ' %d,%e,%e,%e,%e,%e%s', + $i, $cost_array[$i], $jb_array[$i], $jo_array[$i], + $jc_array[$i], $jl_array[$i], "\n"; + } + else { + $iterline = sprintf ' %d,%e,%e,%e,%e,%e%s', + $i, $cost_array[$i], $no_data, $no_data, + $no_data, $no_data, "\n"; + } + + push( @all_costs, $iterline ); + } + + #--------------------------------------------------- + # move term_array into all_cost_terms by iteration + #--------------------------------------------------- + if( @term_array > 0 ) { + my $nterms = 32; + my $max_iter = ($#term_array+1)/$nterms; + my $niter = $max_iter -1; + + for my $iter (0 .. $niter ) { + my $step = $iter * $nterms; + my $iterline = sprintf '%d, %e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e%s', + $iter, $term_array[$step], $term_array[$step+1], $term_array[$step+2], + $term_array[$step+3], $term_array[$step+4], $term_array[$step+5], + $term_array[$step+6], $term_array[$step+7], $term_array[$step+8], + $term_array[$step+9], $term_array[$step+10], $term_array[$step+11], + $term_array[$step+12], $term_array[$step+13], $term_array[$step+14], + $term_array[$step+15], $term_array[$step+16], $term_array[$step+17], + $term_array[$step+18], $term_array[$step+19], $term_array[$step+20], + $term_array[$step+21], $term_array[$step+22], $term_array[$step+23], + $term_array[$step+24], $term_array[$step+25], $term_array[$step+26], + $term_array[$step+27], $term_array[$step+28], $term_array[$step+29], + $term_array[$step+30], $term_array[$step+31], "\n"; + push( @all_cost_terms, $iterline ); + } + } + + #------------------------------------------ + # write all_costs array to costs.txt file + #------------------------------------------ + my $filename2 = "${cdate}.costs.txt"; + if( @all_costs > 0 ) { + open( OUTFILE, ">$filename2" ) or die "Can't open ${filename2}: $!\n"; + print OUTFILE @all_costs; + close( OUTFILE ); + } + + #----------------------------------------------------- + # write all_cost_terms array to costs_terms.txt file + #----------------------------------------------------- + my $filename3 = "${cdate}.cost_terms.txt"; + if( @all_cost_terms > 0 ) { + open( OUTFILE, ">$filename3" ) or die "Can't open ${filename3}: $!\n"; + print OUTFILE @all_cost_terms; + close( OUTFILE ); + } + + #-------------------------- + # move files to $M_TANKverf + #-------------------------- + my $tankdir = $ENV{"M_TANKverf"}; + if(! -d $tankdir) { + system( "mkdir -p $tankdir" ); + } + + if( -e $filename2 ) { + my $newfile2 = "${tankdir}/${filename2}"; + system("cp -f $filename2 $newfile2"); + } + if( -e $filename3 ) { + my $newfile3 = "${tankdir}/${filename3}"; + system("cp -f $filename3 $newfile3"); + } + + } # $rc still == 0 after reading gmon_cost.txt +} +else { # $infile does not exist + $rc = 1; +} + +print "$scr has ended, return code = $rc \n" diff --git a/ush/minmon_xtrct_gnorms.pl b/ush/minmon_xtrct_gnorms.pl new file mode 100755 index 0000000000..61da749a1d --- /dev/null +++ b/ush/minmon_xtrct_gnorms.pl @@ -0,0 +1,444 @@ +#!/usr/bin/env perl + +use strict; +use warnings; +use List::MoreUtils 'true'; +use List::MoreUtils 'first_index'; +use List::MoreUtils 'last_index'; + +#--------------------------------------------------------------------------- +# minmon_xtrct_gnorms.pl +# +# Update the gnorm_data.txt file with data from a new cycle. Add +# this new data to the last line of the gnorm_data.txt file. 
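# For reference, each record appended to gnorm_data.txt by updateGnormData below is
# a flat CSV row; with hypothetical numbers a line looks like
#   2022,08,23,00,2.583506e+04,3.721977e-05,4.100000e-05,3.500000e-05,4.800000e-05
# i.e. year,month,day,hour followed by the initial gradient, the final gnorm, and
# the average/minimum/maximum gnorm over the last 10 iterations.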
+# +# Note: If the gnorm_data.txt file does not exist, it will be created. +# +# The gnorm_data.txt file is used plotted directly by the javascript on +# the GSI stats page. +#--------------------------------------------------------------------------- +sub updateGnormData { + my $cycle = $_[0]; + my $igrad = $_[1]; + my $fgnorm = $_[2]; + my $avg_gnorm = $_[3]; + my $min_gnorm = $_[4]; + my $max_gnorm = $_[5]; + my $suffix = $_[6]; + + my $rc = 0; + my @filearray; + + my $gdfile = "gnorm_data.txt"; + + my $outfile = "new_gnorm_data.txt"; + my $yr = substr( $cycle, 0, 4); + my $mon = substr( $cycle, 4, 2); + my $day = substr( $cycle, 6, 2); + my $hr = substr( $cycle, 8, 2); + + my $newln = sprintf ' %04d,%02d,%02d,%02d,%e,%e,%e,%e,%e%s', + $yr, $mon, $day, $hr, $igrad, $fgnorm, + $avg_gnorm, $min_gnorm, $max_gnorm, "\n"; + + #------------------------------------------------------------- + # attempt to locate the latest $gdfile and copy it locally + # + if( -e $gdfile ) { + open( INFILE, "<${gdfile}" ) or die "Can't open ${gdfile}: $!\n"; + + @filearray = ; + +# This is the mechanism that limits the data to 30 days worth. Should I +# keep it or let the transfer script(s) truncate? 6/12/16 -- I'm going to keep +# it. I can add this as a later change once I add a user mechanism to vary the +# amount of data plotted (on the fly). + + my $cyc_interval = $ENV{'CYCLE_INTERVAL'}; + if( $cyc_interval eq "" ) { + $cyc_interval = 6; + } + + my $max_cyc = 119; # default 30 days worth of data = 120 cycles + # If CYCLE_INTERVAL is other than "" or 6 + # then set the $max_cyc using that interval + if( $cyc_interval != 6 && $cyc_interval != 0 ) { + my $cyc_per_day = 24 / $cyc_interval; + $max_cyc = (30 * $cyc_per_day) - 1; + } + + while( $#filearray > $max_cyc ) { + shift( @filearray ); + } + close( INFILE ); + } + + # Here is the problem Russ encountered after re-running the MinMon: + # If the cycle time in $newln is the same as an existing record in + # *.gnorm_data.txt then we end up with 2+ rows for the same cycle time. + # In that case $newln should replace the first existing line + # in @filearray and all other lines that might match should be deleted. + # Else when the cycle time doesn't already exist (the expected condition) + # it should be pushed into @filearray. + + # algorithm: + # ========= + # Establish $count of matches on "$yr,$mon,$day,$hr" + # if $count > 0 + # while $count > 1 + # get last_index and remove with splice + # replace first_index with $newln + # else + # push $newln + # + my $srch_strng = "$yr,$mon,$day,$hr"; + my $count = true { /$srch_strng/ } @filearray; + + if( $count > 0 ) { + while( $count > 1 ) { + my $l_index = last_index { /$srch_strng/ } @filearray; + splice @filearray, $l_index, 1; + $count = true { /$srch_strng/ } @filearray; + } + my $f_index = first_index { /$srch_strng/ } @filearray; + splice @filearray, $f_index, 1, $newln; + } + else { + push( @filearray, $newln ); + } + + open( OUTFILE, ">$outfile" ) or die "Can't open ${$outfile}: $!\n"; + print OUTFILE @filearray; + close( OUTFILE ); + + system("cp -f $outfile $gdfile"); + +} + +#--------------------------------------------------------------------------- +# makeErrMsg +# +# Apply a gross check on the final value of the gnorm for a specific +# cycle. If the final_gnorm value is greater than the gross_check value +# then put that in the error message file. Also check for resets or a +# premature halt, and journal those events to the error message file too. 
+# +# Note to self: reset_iter array is passed by reference +#--------------------------------------------------------------------------- +sub makeErrMsg { + my $suffix = $_[0]; + my $cycle = $_[1]; + my $final_gnorm = $_[2]; + my $stop_flag = $_[3]; + my $stop_iter = $_[4]; + my $reset_flag = $_[5]; + my $reset_iter = $_[6]; #reset iteration array + my $infile = $_[7]; + my $gross_check = $_[8]; + + my $mail_msg =""; + my $out_file = "${cycle}.errmsg.txt"; + + + if( $stop_flag > 0 ) { + my $stop_msg = " Gnorm check detected premature iteration stop: suffix = $suffix, cycle = $cycle, iteration = $stop_iter"; + $mail_msg .= $stop_msg; + } + + if( $reset_flag > 0 ) { + my $ctr=0; + my $reset_msg = "\n Gnorm check detected $reset_flag reset(s): suffix = $suffix, cycle = $cycle"; + $mail_msg .= $reset_msg; + $mail_msg .= "\n"; + $mail_msg .= " Reset(s) detected in iteration(s): @{$reset_iter}[$ctr] \n"; + + my $arr_size = @{$reset_iter}; + for( $ctr=1; $ctr < $arr_size; $ctr++ ) { + $mail_msg .= " @{$reset_iter}[$ctr]\n"; + } + } + + if( $final_gnorm >= $gross_check ){ + my $gnorm_msg = " Final gnorm gross check failure: suffix = $suffix, cycle = $cycle, final gnorm = $final_gnorm "; + + $mail_msg .= $gnorm_msg; + } + + if( length $mail_msg > 0 ){ + my $file_msg = " File source for report is: $infile"; + $mail_msg .= $file_msg; + } + + if( length $mail_msg > 0 ){ + my $mail_link = "http://www.emc.ncep.noaa.gov/gmb/gdas/gsi_stat/index.html?src=$suffix&typ=gnorm&cyc=$cycle"; + open( OUTFILE, ">$out_file" ) or die "Can't open ${$out_file}: $!\n"; + print OUTFILE $mail_msg; + print OUTFILE "\n\n $mail_link"; + close( OUTFILE ); + } +} + + +#--------------------------------------------------------------------------- +# +# Main routine begins here +# +#--------------------------------------------------------------------------- + +if ($#ARGV != 4 ) { + print "usage: minmon_xtrct_gnorms.pl SUFFIX pdy cyc infile jlogfile\n"; + exit; +} + + +my $suffix = $ARGV[0]; +my $pdy = $ARGV[1]; +my $cyc = $ARGV[2]; +my $infile = $ARGV[3]; +my $jlogfile = $ARGV[4]; + + +my $scr = "minmon_xtrct_gnorms.pl"; +print "$scr Has Started\n"; + +# +# This needs to be redesigned to get the gnorm value from the gsistat file +# using the line that starts "cost,grad,step,b,step?:". The line formerly +# used for the gnorm and reduction values may not be available if the the +# verbose output flag is set to FALSE. 
+# +# So, using the grad value on that line: +# gnorm[i] = (grad[i]**)/(grad[0]**) +# reduct[i] = sqrt(gnorm) + +my $igrad_target; +my $igrad_number; +my $expected_gnorms; +my $gross_check_val; + +my $rc = 0; +my $cdate = sprintf '%s%s', $pdy, $cyc; + +my $gnormfile = $ENV{"mm_gnormfile"}; + + +if( (-e $gnormfile) ) { + open( GNORMFILE, "<${gnormfile}" ) or die "Can't open ${gnormfile}: $!\n"; + my $line; + + while( $line = ) { + if( $line =~ /igrad_target/ ) { + my @termsline = split( /:/, $line ); + $igrad_target = $termsline[1]; + } elsif( $line =~ /igrad_number/ ) { + my @termsline = split( /:/, $line ); + $igrad_number = $termsline[1]; + } elsif( $line =~ /expected_gnorms/ ){ + my @termsline = split( /:/, $line ); + $expected_gnorms = $termsline[1]; + } elsif( $line =~ /gross_check_val/ ){ + my @termsline = split( /:/, $line ); + $gross_check_val = $termsline[1]; + } + } + close( GNORMFILE ); +} else { + $rc = 4; +} + +if( $rc == 0 ) { + if( (-e $infile) ) { + open( INFILE, "<${infile}" ) or die "Can't open ${infile}: $!\n"; + + my $found_igrad = 0; + my $final_gnorm = 0.0; + my $igrad = 0.0; + my $header = 4; + my $header2 = 0; + my @gnorm_array; + my @last_10_gnorm; + + my $reset_flag = 0; + my $stop_flag = 0; + my $warn_str = "WARNING"; + my $stop_str = "Stopping"; + my $stop_iter = ""; + my $reset_str = "Reset"; + my @reset_iter; # reset iteration array + + my $stop_iter_flag = 0; + my $reset_iter_flag = 0; + my $line; + while( $line = ) { + + ############################################## + # if the reset_iter_flag is 1 then record the + # current outer & inner iteration number + ############################################## + if( $reset_iter_flag == 1 ) { + if( $line =~ /${igrad_target}/ ) { + my @iterline = split( / +/, $line ); + my $iter_str = $iterline[2] . "," . $iterline[3]; + push( @reset_iter, $iter_str); + $reset_iter_flag = 0; + } + } + + + if( $line =~ /${igrad_target}/ ) { + my @gradline = split( / +/, $line ); + + my $grad = $gradline[$igrad_number]; + + if( $found_igrad == 0 ){ + $igrad = $grad; + $found_igrad = 1; + } + + my $igrad_sqr = $igrad**2; + my $grad_sqr = $grad**2; + my $gnorm = $grad_sqr/$igrad_sqr; + + push( @gnorm_array, $gnorm ); + } + + + if( $line =~ /${warn_str}/ ) { + if( $line =~ /${stop_str}/ ) { + $stop_flag++; + $stop_iter_flag=1; + } + elsif( $line =~ /${reset_str}/ ){ + $reset_flag++; + $reset_iter_flag = 1; + } + } + + } + close( INFILE ); + + ######################################################################## + # If the stop_flag is >0 then record the last outer & inner + # iteration number. The trick is that it's the last iteration in the + # log file and we just passed it when we hit the stop warning message, + # so we have to reopen the file and get the last iteration number. + ######################################################################## + if( $stop_flag > 0 ) { + open( INFILE, "<${infile}" ) or die "Can't open ${infile}: $!\n"; + + my @lines = reverse ; + foreach $line (@lines) { + if( $line =~ /${igrad_target}/ ){ + my @iterline = split( / +/, $line ); + $stop_iter = $iterline[2] . "," . $iterline[3]; + last; + } + } + close( INFILE ); + } + + + my @all_gnorm = @gnorm_array; + + ############################################################################## + ## + ## If the iterations were halted due to error then the @all_gnorm array won't + ## be the expected size. In that case we need to pad the array out with + ## RMISS values so GrADS won't choke when it tries to read the data file. 
+ ## + ## Note that we're padding @all_gnorm. The @gnorm_array is examined below + ## and we don't want to pad that and mess up the min/max calculation. + ## + ############################################################################### + my $arr_size = @all_gnorm; + + if( $arr_size < $expected_gnorms ) { + for( my $ctr = $arr_size; $ctr < $expected_gnorms; $ctr++ ) { + push( @all_gnorm, -999.0 ); + } + } + + my $sum_10_gnorm = 0.0; + my $min_gnorm = 9999999.0; + my $max_gnorm = -9999999.0; + my $avg_gnorm = 0.0; + + for( my $ctr = 9; $ctr >= 0; $ctr-- ) { + my $new_gnorm = pop( @gnorm_array ); + $sum_10_gnorm = $sum_10_gnorm + $new_gnorm; + if( $new_gnorm > $max_gnorm ) { + $max_gnorm = $new_gnorm; + } + if( $new_gnorm < $min_gnorm ) { + $min_gnorm = $new_gnorm; + } + if( $ctr == 9 ) { + $final_gnorm = $new_gnorm; + } + } + + $avg_gnorm = $sum_10_gnorm / 10; + + + ##################################################################### + # Update the gnorm_data.txt file with information on the + # initial gradient, final gnorm, and avg/min/max for the last 10 + # iterations. + ##################################################################### + updateGnormData( $cdate,$igrad,$final_gnorm,$avg_gnorm,$min_gnorm,$max_gnorm,$suffix ); + + + ##################################################################### + # Call makeErrMsg to build the error message file to record any + # abnormalities in the minimization. This file can be mailed by + # a calling script. + ##################################################################### + makeErrMsg( $suffix, $cdate, $final_gnorm, $stop_flag, $stop_iter, $reset_flag, \@reset_iter, $infile, $gross_check_val ); + + + ######################################################### + # write to GrADS ready output data file + # + # Note: this uses pack to achieve the same results as + # an unformatted binary Fortran file. + ######################################################### + my $filename2 = "${cdate}.gnorms.ieee_d"; + + open( OUTFILE, ">$filename2" ) or die "Can't open ${filename2}: $!\n"; + binmode OUTFILE; + + print OUTFILE pack( 'f*', @all_gnorm); + + close( OUTFILE ); + + #-------------------------- + # move files to $M_TANKverf + #-------------------------- + my $tankdir = $ENV{"M_TANKverf"}; + print "M_TANKverf = $tankdir \n"; + if(! -d $tankdir) { + print "making $tankdir\n"; + system( "mkdir -p $tankdir" ); + } + + if( -e $filename2 ) { + system("cp -f $filename2 ${tankdir}/."); + } + + my $gdfile = "gnorm_data.txt"; + if( -e $gdfile ) { + system("cp -f $gdfile ${tankdir}/."); + } + + my $errmsg = "${cdate}.errmsg.txt"; + if( -e $errmsg ) { + system("cp -f $errmsg ${tankdir}/."); + } + + } # $rc still == 0 after reading gmon_gnorm.txt + +}else { # $infile does not exist + $rc = 3; +} + +print "$scr has ended, return code = $rc \n" diff --git a/ush/minmon_xtrct_reduct.pl b/ush/minmon_xtrct_reduct.pl new file mode 100755 index 0000000000..1b8186b6ad --- /dev/null +++ b/ush/minmon_xtrct_reduct.pl @@ -0,0 +1,89 @@ +#!/usr/bin/env perl + +use strict; + +#--------------------------------------------------------------------------- +# minmon_xtrct_reduct.pl +# +# Extract the reduction stats for a GSI minimization run and store in +# reduction.ieee_d files ready for GrADS use. 
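# In terms of the quantities computed by minmon_xtrct_gnorms.pl, each reduction value
# stored here is reduct[i] = grad[i]/grad[0] = sqrt(gnorm[i]). A worked example with
# hypothetical gradients grad[0]=2.0e+03 and grad[i]=5.0e+02:
#   gnorm[i]  = (5.0e+02)**2 / (2.0e+03)**2 = 6.25e-02
#   reduct[i] = 5.0e+02 / 2.0e+03           = 2.5e-01 = sqrt(6.25e-02)
# As with the gnorm file, pack('f*', ...) writes the array as a flat stream of
# native single-precision floats for the .ieee_d output.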
+#--------------------------------------------------------------------------- + +if ($#ARGV != 4 ) { + print "usage: minmon_xtrct_reduct.pl SUFFIX pdy cyc infile jlogfile\n"; + print " suffix is data source identifier\n"; + print " pdy is YYYYMMDD of the cycle to be processed\n"; + print " cyc is HH of the cycle to be processed\n"; + print " infile is the data file containing the reduction stats\n"; + print " jlogfile is the job log file\n"; + exit; +} +my $suffix = $ARGV[0]; +my $pdy = $ARGV[1]; +my $cyc = $ARGV[2]; +my $infile = $ARGV[3]; +my $jlogfile = $ARGV[4]; + +my $scr = "minmon_xtrct_reduct.pl"; +print "$scr has started\n"; + +my $rc = 0; +my $cdate = sprintf '%s%s', $pdy, $cyc; +my $initial_gradient = -999.0; +my $iter_gradient; + +if( (-e $infile) ) { + + my $reduct_target = "cost,grad,step,b,step?"; + my $gradient_num = 5; + my $reduct; + + open( INFILE, "<${infile}" ) or die "Can't open ${infile}: $!\n"; + + my @reduct_array; + + while( my $line = ) { + if( $line =~ /$reduct_target/ ) { + my @reduct_ln = split( / +/, $line ); + $iter_gradient = $reduct_ln[$gradient_num]; + if( $initial_gradient == -999.0 ){ + $initial_gradient = $iter_gradient; + } + + $reduct = $iter_gradient / $initial_gradient; + + push( @reduct_array, $reduct ); + } + } + + close( INFILE ); + + + ################################# + # write reduct_array to outfile + ################################# + my $outfile = "${cdate}.reduction.ieee_d"; + open( OUTFILE, ">$outfile" ) or die "Can't open ${outfile}: $!\n"; + binmode OUTFILE; + + print OUTFILE pack( 'f*', @reduct_array); + close( OUTFILE ); + + #---------------------------- + # copy outfile to $M_TANKverf + #---------------------------- + my $tankdir = $ENV{"M_TANKverf"}; + if(! -d $tankdir) { + system( "mkdir -p $tankdir" ); + } + + if( -e $outfile ) { + my $newfile = "${tankdir}/${outfile}"; + system("cp -f $outfile $newfile"); + } + +} else { # $infile does not exist + $rc = 5; +} + +print "$scr has ended, return code = $rc \n" diff --git a/ush/ozn_xtrct.sh b/ush/ozn_xtrct.sh new file mode 100755 index 0000000000..3303cab2d1 --- /dev/null +++ b/ush/ozn_xtrct.sh @@ -0,0 +1,266 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +#------------------------------------------------------------------ +# ozn_xtrct.sh +# +# This script performs the data extraction from the oznstat +# diagnostic files. The resulting data (*.ieee_d) files, GrADS +# control files and stdout files will be moved to the +# $TANKverf_ozn. +# +# Calling scripts must define: +# $TANKverf_ozn +# $HOMEoznmon +# $PDATE +# +# Return values are +# 0 = normal +# 2 = unable to generate satype list; may indicate no diag +# files found in oznstat file +#------------------------------------------------------------------ + +#-------------------------------------------------- +# check_diag_files +# +# Compare $satype (which contains the contents of +# gdas_oznmon_satype.txt to $avail_satype which is +# determined by the contents of the oznstat file. 
+# Report any missing diag files in a file named +# bad_diag.$PDATE +# +check_diag_files() { + pdate=$1 + found_satype=$2 + avail_satype=$3 + + out_file="bad_diag.${pdate}" + + echo ""; echo ""; echo "--> check_diag_files" + + for type in ${found_satype}; do + len_check=$(echo ${avail_satype} | grep ${type} | wc -c) + + if [[ ${len_check} -le 1 ]]; then + echo "missing diag file -- diag_${type}_ges.${pdate}.gz not found " >> ./${out_file} + fi + done + + echo "<-- check_diag_files"; echo ""; echo "" +} + + +iret=0 +export NCP=${NCP:-/bin/cp} +VALIDATE_DATA=${VALIDATE_DATA:-0} +nregion=${nregion:-6} +DO_DATA_RPT=${DO_DATA_RPT:-0} + +netcdf_boolean=".false." +if [[ $OZNMON_NETCDF -eq 1 ]]; then + netcdf_boolean=".true." +fi + +OZNMON_NEW_HDR=${OZNMON_NEW_HDR:-0} +new_hdr="F" +if [[ $OZNMON_NEW_HDR -eq 1 ]]; then + new_hdr="T" +fi + +#------------------------------------------------------------------ +# if VALIDATE_DATA then locate and untar base file +# +validate=".FALSE." +if [[ $VALIDATE_DATA -eq 1 ]]; then + if [[ ! -e $ozn_val_file && ! -h $ozn_val_file ]]; then + echo "WARNING: VALIDATE_DATA set to 1, but unable to locate $ozn_val_file" + echo " Setting VALIDATE_DATA to 0/OFF" + VALIDATE_DATA=0 + else + validate=".TRUE." + val_file=$(basename ${ozn_val_file}) + ${NCP} $ozn_val_file $val_file + tar -xvf $val_file + fi +fi +echo "VALIDATE_DATA, validate = $VALIDATE_DATA, $validate " + + + +#------------------------------------------------------------------ +# ozn_ptype here is the processing type which is intended to be "ges" +# or "anl". Default is "ges". +# +ozn_ptype=${ozn_ptype:-"ges anl"} + + +#--------------------------------------------------------------------------- +# Build satype list from the available diag files. +# +# An empty satype list means there are no diag files to process. That's +# a problem, reported by an iret value of 2 +# + +avail_satype=$(ls -l d*ges* | sed -e 's/_/ /g;s/\./ /' | gawk '{ print $11 "_" $12 }') + +if [[ ${DO_DATA_RPT} -eq 1 ]]; then + if [[ -e ${SATYPE_FILE} ]]; then + satype=$(cat ${SATYPE_FILE}) + check_diag_files ${PDATE} "${satype}" "${avail_satype}" + else + echo "WARNING: missing ${SATYPE_FILE}" + fi +fi + +len_satype=$(echo -n "${satype}" | wc -c) + +if [[ ${len_satype} -le 1 ]]; then + satype=${avail_satype} +fi + +echo ${satype} + + +len_satype=$(echo -n "${satype}" | wc -c) + +if [[ ${DO_DATA_RPT} -eq 1 && ${len_satype} -lt 1 ]]; then + iret=2 + +else + + #-------------------------------------------------------------------- + # Copy extraction programs to working directory + # + ${NCP} ${HOMEoznmon}/exec/oznmon_time.x ./oznmon_time.x + if [[ ! -e oznmon_time.x ]]; then + iret=2 + exit ${iret} + fi + ${NCP} ${HOMEoznmon}/exec/oznmon_horiz.x ./oznmon_horiz.x + if [[ ! 
-e oznmon_horiz.x ]]; then + iret=3 + exit ${iret} + fi + + + #--------------------------------------------------------------------------- + # Outer loop over $ozn_ptype (default values 'ges', 'anl') + # + echo "ozn_ptype = ${ozn_ptype}" + for ptype in ${ozn_ptype}; do + echo "ptype = ${ptype}" + + + for type in ${avail_satype}; do + if [[ -f "diag_${type}_${ptype}.${PDATE}.gz" ]]; then + mv diag_${type}_${ptype}.${PDATE}.gz ${type}.${ptype}.gz + gunzip ./${type}.${ptype}.gz + fi + done + + + #-------------------------------------------------------------------- + # Run programs for given time + + iyy=$(echo ${PDATE} | cut -c1-4) + imm=$(echo ${PDATE} | cut -c5-6) + idd=$(echo ${PDATE} | cut -c7-8) + ihh=$(echo ${PDATE} | cut -c9-10) + + for type in ${avail_satype}; do + echo "processing ptype, type: ${ptype}, ${type}" + rm -f input + +cat << EOF > input + &INPUT + satname='${type}', + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-720, + incr=6, + nregion=${nregion}, + region(1)='global', rlonmin(1)=-180.0,rlonmax(1)=180.0,rlatmin(1)=-90.0,rlatmax(1)= 90.0, + region(2)='70N-90N', rlonmin(2)=-180.0,rlonmax(2)=180.0,rlatmin(2)= 70.0,rlatmax(2)= 90.0, + region(3)='20N-70N', rlonmin(3)=-180.0,rlonmax(3)=180.0,rlatmin(3)= 20.0,rlatmax(3)= 70.0, + region(4)='20S-20N', rlonmin(4)=-180.0,rlonmax(4)=180.0,rlatmin(4)=-20.0,rlatmax(4)= 20.0, + region(5)='20S-70S', rlonmin(5)=-180.0,rlonmax(5)=180.0,rlatmin(5)=-70.0,rlatmax(5)=-20.0, + region(6)='70S-90S', rlonmin(6)=-180.0,rlonmax(6)=180.0,rlatmin(6)=-90.0,rlatmax(6)=-70.0, + validate=${validate}, + new_hdr=${new_hdr}, + ptype=${ptype}, + netcdf=${netcdf_boolean} + / +EOF + + + echo "oznmon_time.x HAS STARTED ${type}" + + ./oznmon_time.x < input > stdout.time.${type}.${ptype} + + echo "oznmon_time.x HAS ENDED ${type}" + + if [[ ! -d ${TANKverf_ozn}/time ]]; then + mkdir -p ${TANKverf_ozn}/time + fi + $NCP ${type}.${ptype}.ctl ${TANKverf_ozn}/time/ + $NCP ${type}.${ptype}.${PDATE}.ieee_d ${TANKverf_ozn}/time/ + + $NCP bad* ${TANKverf_ozn}/time/ + + rm -f input + +cat << EOF > input + &INPUT + satname='${type}', + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-18, + incr=6, + new_hdr=${new_hdr}, + ptype=${ptype}, + netcdf=${netcdf_boolean} + / +EOF + + echo "oznmon_horiz.x HAS STARTED ${type}" + + ./oznmon_horiz.x < input > stdout.horiz.${type}.${ptype} + + echo "oznmon_horiz.x HAS ENDED ${type}" + + if [[ ! 
-d ${TANKverf_ozn}/horiz ]]; then + mkdir -p ${TANKverf_ozn}/horiz + fi + $NCP ${type}.${ptype}.ctl ${TANKverf_ozn}/horiz/ + + $COMPRESS ${type}.${ptype}.${PDATE}.ieee_d + $NCP ${type}.${ptype}.${PDATE}.ieee_d.${Z} ${TANKverf_ozn}/horiz/ + + + echo "finished processing ptype, type: $ptype, $type" + done # type in satype + + done # ptype in $ozn_ptype + + tar -cvf stdout.horiz.tar stdout.horiz* + ${COMPRESS} stdout.horiz.tar + ${NCP} stdout.horiz.tar.${Z} ${TANKverf_ozn}/horiz/ + + tar -cvf stdout.time.tar stdout.time* + ${COMPRESS} stdout.time.tar + ${NCP} stdout.time.tar.${Z} ${TANKverf_ozn}/time/ +fi + +#------------------------------------------------------- +# Conditionally remove data files older than 40 days +# +if [[ ${CLEAN_TANKDIR:-0} -eq 1 ]]; then + ${HOMEoznmon}/ush/clean_tankdir.sh glb 40 +fi + +exit ${iret} diff --git a/ush/radmon_diag_ck.sh b/ush/radmon_diag_ck.sh new file mode 100755 index 0000000000..142e99f8c7 --- /dev/null +++ b/ush/radmon_diag_ck.sh @@ -0,0 +1,175 @@ +#!/bin/bash + +#---------------------------------------------------------------- +# Check the contents of the radstat file and compare to +# the ${run}_radmon_satype.txt file. Report any missing +# or zero sized diag files. +# + + function usage { + echo "Usage: radmon_diag_ck.sh -rad radstat --sat satype --out output " + echo "" + echo " -r,--rad radstat file (required)" + echo " File name or path to radstat file." + echo "" + echo " -s,--sat satype file (required)" + echo " File name or path to satype file." + echo "" + echo " -o,--out output file name (required)" + echo " File name for missing diag file report." + } + + +echo "--> radmon_diag_ck.sh" + + +#-------------------------- +# Process input arguments +# + nargs=$# + if [[ $nargs -ne 6 ]]; then + usage + exit 1 + fi + + while [[ $# -ge 1 ]] + do + key="$1" + echo $key + + case $key in + -r|--rad) + radstat_file="$2" + shift # past argument + ;; + -s|--sat) + satype_file="$2" + shift # past argument + ;; + -o|--out) + output_file="$2" + shift # past argument + ;; + *) + #unspecified key + echo " unsupported key = $key" + ;; + esac + + shift + done + +# set -ax + + echo " radstat_file = ${radstat_file}" + echo " satype_file = ${satype_file}" + echo " output_file = ${output_file}" + + missing_diag="" + zero_len_diag="" + + #--------------------------------------------- + # get list of diag files in the radstat file + # + radstat_contents=`tar -tf ${radstat_file} | grep '_ges' | + gawk -F"diag_" '{print $2}' | + gawk -F"_ges" '{print $1}'` + + + #--------------------------------------------- + # load contents of satype_file into an array + # + satype_contents=`cat ${satype_file}` + + + #------------------------------------------------- + # compare $satype_contents and $radstat_contents + # report anything missing + # + for sat in $satype_contents; do + test=`echo $radstat_contents | grep $sat` + + if [[ ${#test} -le 0 ]]; then + missing_diag="${missing_diag} ${sat}" + fi + + done + + echo "" + echo "missing_diag = ${missing_diag}" + echo "" + + + #--------------------------------------------------------- + # Check for zero sized diag files. The diag files in + # the radstat file (which is a tar file) are gzipped. + # I find that 0 sized, gzipped file has a size of ~52 + # (I assume that's for header and block size). + # + # So for this check we'll assume anything in the radstat + # file with a size of > 1000 bytes is suspect. (That's + # overkill, 100 is probably sufficient, but I'm the + # nervous type.) 
So we'll extract, uncompress, and check + # the actual file size of those. Anything with an + # uncompressed size of 0 goes on the zero_len_diag list. + # + verbose_contents=`tar -tvf ${radstat_file} | grep '_ges'` + + + #------------------------------------------------------- + # note: need to reset the IFS to line breaks otherwise + # the $vc value in the for loop below will break + # on all white space, not the line break. + SAVEIFS=$IFS + IFS=$(echo -en "\n\b") + + + for vc in ${verbose_contents}; do + + gzip_len=`echo ${vc} | gawk '{print $3}'` + + if [[ ${gzip_len} -le 1000 ]]; then + test_file=`echo ${vc} | gawk '{print $6}'` + tar -xf ${radstat_file} ${test_file} + + gunzip ${test_file} + unzipped_file=`echo ${test_file%.*}` + + uz_file_size=`ls -la ${unzipped_file} | gawk '{print $5}'` + + if [[ ${uz_file_size} -le 0 ]]; then + sat=`echo ${unzipped_file} | gawk -F"diag_" '{print $2}' | + gawk -F"_ges" '{print $1}'` + + zero_len_diag="${zero_len_diag} ${sat}" + fi + + rm -f ${unzipped_file} + fi + done + + IFS=${SAVEIFS} # reset IFS to default (white space) + + echo "" + echo "zero_len_diag = ${zero_len_diag}" + echo "" + + + #----------------------------------------- + # Write results to $output_file + # + if [[ ${#zero_len_diag} -gt 0 ]]; then + for zld in ${zero_len_diag}; do + echo " Zero Length diagnostic file: $zld" >> $output_file + done + fi + + if [[ ${#missing_diag} -gt 0 ]]; then + for md in ${missing_diag}; do + echo " Missing diagnostic file : $md" >> $output_file + done + fi + + +echo "<-- radmon_diag_ck.sh" +exit diff --git a/ush/radmon_err_rpt.sh b/ush/radmon_err_rpt.sh new file mode 100755 index 0000000000..8561563d48 --- /dev/null +++ b/ush/radmon_err_rpt.sh @@ -0,0 +1,194 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: radmon_err_rpt.sh +# Script description: Compare the contents of error files from two different +# cycles. +# +# Author: Ed Safford Org: NP23 Date: 2012-02-02 +# +# Abstract: This script compares the contents of two error files from two different +# sets of radiance diagnostic files (which are an output from GSI runs). +# All unique satellite instrument/channel/region combinations that appear +# in both files are reported. +# +# This script is run as a child script of radmon_verf_time.sh. The parent +# script creates/copies the error files into a temporary working +# directory before invoking this script. +# +# +# Usage: radmon_err_rpt.sh file1 file2 type cycle1 cycle2 diag_rpt outfile +# +# Input script positional parameters: +# file1 obs, penalty, or channel error file +# required +# file2 obs, penalty, or channel error file +# required +# type type of error file +# choices are obs, pen, chan, or cnt; required +# cycle1 first cycle processing date +# yyyymmddcc format; required +# cycle2 second cycle processing date +# yyyymmddcc format; required +# diag_rpt diagnostic report text file +# required +# outfile output file name +# required +# +# Remarks: +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +#################################################################### + +# Command line arguments. 
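+# Each assignment below prefers the positional argument and falls back to a
+# like-named environment variable, aborting if neither is set.  A hypothetical
+# invocation from radmon_verf_time.sh would look like (file names and cycle
+# times illustrative only):
+#
+#   radmon_err_rpt.sh bad_pen.2022082300 bad_pen.2022082306 pen \
+#       2022082300 2022082306 diag_report.txt pen_err.txt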
+file1=${1:-${file1:?}} +file2=${2:-${file2:?}} +type=${3:-${type:?}} +cycle1=${4:-${cycle1:?}} +cycle2=${5:-${cycle2:?}} +diag_rpt=${6:-${diag_rpt:?}} +outfile=${7:-${outfile:?}} + +# Directories +HOMEradmon=${HOMEradmon:-$(pwd)} + +# Other variables +err=0 +RADMON_SUFFIX=${RADMON_SUFFIX} + +have_diag_rpt=0 +if [[ -s $diag_rpt ]]; then + have_diag_rpt=1 +else + err=1 +fi +echo "have_diag_rpt = $have_diag_rpt" + +#----------------------------------------------------------------------------- +# read each line in the $file1 +# search $file2 for the same satname, channel, and region +# if same combination is in both files, add the values to the output file +# +{ while read myline; do + echo "myline = $myline" + bound="" + + echo $myline + satname=$(echo $myline | gawk '{print $1}') + channel=$(echo $myline | gawk '{print $3}') + region=$(echo $myline | gawk '{print $5}') + value1=$(echo $myline | gawk '{print $7}') + bound=$(echo $myline | gawk '{print $9}') + +# +# Check findings against diag_report. If the satellite/instrument is on the +# diagnostic report it means the diagnostic file file for the +# satelite/instrument is missing for this cycle, so skip any additional +# error checking for that source. Otherwise, evaluate as per normal. +# + + diag_match="" + diag_match_len=0 + + if [[ $have_diag_rpt == 1 ]]; then + diag_match=$(gawk "/$satname/" $diag_rpt) + diag_match_len=$(echo ${#diag_match}) + fi + + + if [[ $diag_match_len == 0 ]]; then + + if [[ $type == "chan" ]]; then + echo "looking for match for $satname and $channel" + { while read myline2; do + satname2=$(echo $myline2 | gawk '{print $1}') + channel2=$(echo $myline2 | gawk '{print $3}') + + if [[ $satname == $satname2 && $channel == $channel2 ]]; then + match="$satname channel= $channel" + echo "match from gawk = $match" + break; + else + match="" + fi + + done } < $file2 + + + else + match=$(gawk "/$satname/ && /channel= $channel / && /region= $region /" $file2) + echo match = $match + + match_len=$(echo ${#match}) + if [[ $match_len > 0 ]]; then + channel2=$(echo $match | gawk '{print $3}') + + if [[ $channel2 != $channel ]]; then + match="" + fi + fi + + fi + match_len=$(echo ${#match}) + + if [[ $match_len > 0 ]]; then + + value2=$(echo $match | gawk '{print $7}') + bound2=$(echo $match | gawk '{print $9}') + + if [[ $type == "chan" ]]; then + tmpa=" $satname channel= $channel" + tmpb="" + + elif [[ $type == "pen" ]]; then + tmpa="$satname channel= $channel region= $region" + tmpb="$cycle1 $value1 $bound" + + elif [[ $type == "cnt" ]]; then + tmpa="$satname channel= $channel region= $region" + tmpb="$cycle1 $value1 $bound" + + else + tmpa="$satname channel= $channel region= $region" + tmpb="$cycle1: $type= $value1" + fi + + line1="$tmpa $tmpb" + echo "$line1" >> $outfile + + if [[ $type != "chan" ]]; then + tmpc=$(echo $tmpa |sed 's/[a-z]/ /g' | sed 's/[0-9]/ /g' | sed 's/=/ /g' | sed 's/_/ /g' | sed 's/-/ /g') + + if [[ $type == "pen" || $type == "cnt" ]]; then + line2=" $tmpc $cycle2 $value2 $bound2" + else + line2=" $tmpc $cycle2: $type= $value2" + fi + + echo "$line2" >> $outfile + fi + + #----------------------------------------- + # add hyperlink to warning entry + # + line3=" http://www.emc.ncep.noaa.gov/gmb/gdas/radiance/es_rad/${RADMON_SUFFIX}/index.html?sat=${satname}®ion=${region}&channel=${channel}&stat=${type}" + if [[ $channel -gt 0 ]]; then + echo "$line3" >> $outfile + echo "" >> $outfile + fi + fi + fi +done } < $file1 + + +################################################################################ 
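+# For reference, each warning entry appended to $outfile by the loop above has
+# the general form (penalty and count reports)
+#   <satname> channel= <channel> region= <region> <cycle1> <value1> <bound>
+# followed by an aligned second line carrying the second cycle's value and bound.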
+# Post processing + +exit ${err} + diff --git a/ush/radmon_verf_angle.sh b/ush/radmon_verf_angle.sh new file mode 100755 index 0000000000..fc91c5ae77 --- /dev/null +++ b/ush/radmon_verf_angle.sh @@ -0,0 +1,234 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: radmon_verf_angle.sh +# Script description: Extract angle dependent data from radiance +# diagnostic files. +# +# Author: Ed Safford Org: NP23 Date: 2012-02-02 +# +# Abstract: This script extracts angle dependent data from radiance +# diagnostic files (which are an output from GSI runs), +# storing the extracted data in small binary files. +# +# This script is a child script of exgdas_vrfyrad.sh.sms. The parent +# script opens and uncompresses the radiance diagnostic file and copies +# other supporting files into a temporary working directory. +# +# +# Usage: radmon_verf_angle.sh PDATE +# +# Input script positional parameters: +# PDATE processing date +# yyyymmddcc format; required +# +# Imported Shell Variables: +# RADMON_SUFFIX data source suffix +# defauls to opr +# EXECradmon executable directory +# defaults to current directory +# RAD_AREA global or regional flag +# defaults to global +# TANKverf_rad data repository +# defaults to current directory +# SATYPE list of satellite/instrument sources +# defaults to none +# VERBOSE Verbose flag (YES or NO) +# defaults to NO +# LITTLE_ENDIAN flag to indicate LE machine +# defaults to 0 (big endian) +# USE_ANL use analysis files as inputs in addition to +# the ges files. Default is 0 (ges only) +# +# Modules and files referenced: +# scripts : +# +# programs : $NCP +# $angle_exec +# +# fixed data : $scaninfo +# +# input data : $data_file +# +# output data: $angle_file +# $angle_ctl +# $pgmout +# +# Remarks: +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +#################################################################### + +# Command line arguments. +RAD_AREA=${RAD_AREA:-glb} +REGIONAL_RR=${REGIONAL_RR:-0} # rapid refresh model flag +rgnHH=${rgnHH:-} +rgnTM=${rgnTM:-} + +export PDATE=${1:-${PDATE:?}} + +echo " REGIONAL_RR, rgnHH, rgnTM = $REGIONAL_RR, $rgnHH, $rgnTM" +netcdf_boolean=".false." +if [[ $RADMON_NETCDF -eq 1 ]]; then + netcdf_boolean=".true." +fi +echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, $netcdf_boolean" + +which prep_step +which startmsg + +# Directories +FIXgdas=${FIXgdas:-$(pwd)} +EXECradmon=${EXECradmon:-$(pwd)} +TANKverf_rad=${TANKverf_rad:-$(pwd)} + +# File names +export pgmout=${pgmout:-${jlogfile}} +touch $pgmout + +# Other variables +SATYPE=${SATYPE:-} +VERBOSE=${VERBOSE:-NO} +LITTLE_ENDIAN=${LITTLE_ENDIAN:-0} +USE_ANL=${USE_ANL:-0} + + +if [[ $USE_ANL -eq 1 ]]; then + gesanl="ges anl" +else + gesanl="ges" +fi + +err=0 +angle_exec=radmon_angle.x +shared_scaninfo=${shared_scaninfo:-$FIXgdas/gdas_radmon_scaninfo.txt} +scaninfo=scaninfo.txt + +#-------------------------------------------------------------------- +# Copy extraction program and supporting files to working directory + +$NCP ${EXECradmon}/${angle_exec} ./ +$NCP $shared_scaninfo ./${scaninfo} + +if [[ ! -s ./${angle_exec} || ! 
-s ./${scaninfo} ]]; then + err=2 +else +#-------------------------------------------------------------------- +# Run program for given time + + export pgm=${angle_exec} + + iyy=$(echo $PDATE | cut -c1-4) + imm=$(echo $PDATE | cut -c5-6) + idd=$(echo $PDATE | cut -c7-8) + ihh=$(echo $PDATE | cut -c9-10) + + ctr=0 + fail=0 + touch "./errfile" + + for type in ${SATYPE}; do + + if [[ ! -s ${type} ]]; then + echo "ZERO SIZED: ${type}" + continue + fi + + for dtype in ${gesanl}; do + + echo "pgm = $pgm" + echo "pgmout = $pgmout" + prep_step + + ctr=$(expr $ctr + 1) + + if [[ $dtype == "anl" ]]; then + data_file=${type}_anl.${PDATE}.ieee_d + ctl_file=${type}_anl.ctl + angl_ctl=angle.${ctl_file} + else + data_file=${type}.${PDATE}.ieee_d + ctl_file=${type}.ctl + angl_ctl=angle.${ctl_file} + fi + + if [[ $REGIONAL_RR -eq 1 ]]; then + angl_file=${rgnHH}.${data_file}.${rgnTM} + fi + + + rm input + + nchanl=-999 +cat << EOF > input + &INPUT + satname='${type}', + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-720, + incr=${CYCLE_INTERVAL}, + nchanl=${nchanl}, + suffix='${RADMON_SUFFIX}', + gesanl='${dtype}', + little_endian=${LITTLE_ENDIAN}, + rad_area='${RAD_AREA}', + netcdf=${netcdf_boolean}, + / +EOF + + startmsg + ./${angle_exec} < input >> ${pgmout} 2>>errfile + export err=$?; err_chk + if [[ $err -ne 0 ]]; then + fail=$(expr $fail + 1) + fi + + if [[ -s ${angl_file} ]]; then + ${COMPRESS} -f ${angl_file} + fi + + if [[ -s ${angl_ctl} ]]; then + ${COMPRESS} -f ${angl_ctl} + fi + + + done # for dtype in ${gesanl} loop + + done # for type in ${SATYPE} loop + + + ${USHradmon}/rstprod.sh + + tar_file=radmon_angle.tar + if compgen -G "angle*.ieee_d* angle*.ctl*" > /dev/null; then + tar -cf $tar_file angle*.ieee_d* angle*.ctl* + ${COMPRESS} ${tar_file} + mv $tar_file.${Z} ${TANKverf_rad}/. + + if [[ $RAD_AREA = "rgn" ]]; then + cwd=$(pwd) + cd ${TANKverf_rad} + tar -xf ${tar_file}.${Z} + rm ${tar_file}.${Z} + cd ${cwd} + fi + fi + + if [[ $ctr -gt 0 && $fail -eq $ctr || $fail -gt $ctr ]]; then + err=3 + fi +fi + +################################################################################ +# Post processing + +exit ${err} diff --git a/ush/radmon_verf_bcoef.sh b/ush/radmon_verf_bcoef.sh new file mode 100755 index 0000000000..fc8110063c --- /dev/null +++ b/ush/radmon_verf_bcoef.sh @@ -0,0 +1,233 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: radmon_verf_bcoef.sh +# Script description: Extract bias correction coefficients data from radiance +# diagnostic files. +# +# Author: Ed Safford Org: NP23 Date: 2012-02-02 +# +# Abstract: This script extracts bias correction coefficient related data from +# radiance diagnostic files (which are an output from GSI runs), +# storing the extracted data in small binary files. +# +# This script is a child script of exgdas_vrfyrad.sh.sms. The parent +# script opens and uncompresses the radiance diagnostic file and copies +# other supporting files into a temporary working directory. 
+# +# +# Usage: radmon_verf_bcoef.sh PDATE +# +# Input script positional parameters: +# PDATE processing date +# yyyymmddcc format; required +# +# Imported Shell Variables: +# RADMON_SUFFIX data source suffix +# defauls to opr +# EXECradmon executable directory +# defaults to current directory +# FIXradmon fixed data directory +# defaults to current directory +# RAD_AREA global or regional flag +# defaults to global +# TANKverf_rad data repository +# defaults to current directory +# SATYPE list of satellite/instrument sources +# defaults to none +# LITTLE_ENDIAN flag for LE machine +# defaults to 0 (big endian) +# USE_ANL use analysis files as inputs in addition to +# the ges files. Default is 0 (ges only) +# +# Modules and files referenced: +# scripts : +# +# programs : $NCP +# $bcoef_exec +# +# fixed data : $biascr +# +# input data : $data_file +# +# output data: $bcoef_file +# $bcoef_ctl +# $pgmout +# +# Remarks: +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +#################################################################### +# Command line arguments. +export PDATE=${1:-${PDATE:?}} + +netcdf_boolean=".false." +if [[ $RADMON_NETCDF -eq 1 ]]; then + netcdf_boolean=".true." +fi +echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, $netcdf_boolean" + +# Directories +FIXgdas=${FIXgdas:-$(pwd)} +EXECradmon=${EXECradmon:-$(pwd)} +TANKverf_rad=${TANKverf_rad:-$(pwd)} + +# File names +pgmout=${pgmout:-${jlogfile}} +touch $pgmout + +# Other variables +RAD_AREA=${RAD_AREA:-glb} +REGIONAL_RR=${REGIONAL_RR:-0} +rgnHH=${rgnHH:-} +rgnTM=${rgnTM:-} +SATYPE=${SATYPE:-} +LITTLE_ENDIAN=${LITTLE_ENDIAN:-0} +USE_ANL=${USE_ANL:-0} + + +err=0 +bcoef_exec=radmon_bcoef.x + +if [[ $USE_ANL -eq 1 ]]; then + gesanl="ges anl" +else + gesanl="ges" +fi + +#-------------------------------------------------------------------- +# Copy extraction program and supporting files to working directory + +$NCP $EXECradmon/${bcoef_exec} ./${bcoef_exec} +$NCP ${biascr} ./biascr.txt + +if [[ ! -s ./${bcoef_exec} || ! -s ./biascr.txt ]]; then + err=4 +else + + +#-------------------------------------------------------------------- +# Run program for given time + + export pgm=${bcoef_exec} + + iyy=$(echo $PDATE | cut -c1-4) + imm=$(echo $PDATE | cut -c5-6) + idd=$(echo $PDATE | cut -c7-8) + ihh=$(echo $PDATE | cut -c9-10) + + ctr=0 + fail=0 + + nchanl=-999 + npredr=5 + + for type in ${SATYPE}; do + + if [[ ! 
-s ${type} ]]; then + echo "ZERO SIZED: ${type}" + continue + fi + + for dtype in ${gesanl}; do + + prep_step + + ctr=$(expr $ctr + 1) + + if [[ $dtype == "anl" ]]; then + data_file=${type}_anl.${PDATE}.ieee_d + ctl_file=${type}_anl.ctl + bcoef_ctl=bcoef.${ctl_file} + else + data_file=${type}.${PDATE}.ieee_d + ctl_file=${type}.ctl + bcoef_ctl=bcoef.${ctl_file} + fi + + if [[ $REGIONAL_RR -eq 1 ]]; then + bcoef_file=${rgnHH}.bcoef.${data_file}.${rgnTM} + else + bcoef_file=bcoef.${data_file} + fi + + + rm input + + +cat << EOF > input + &INPUT + satname='${type}', + npredr=${npredr}, + nchanl=${nchanl}, + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-720, + incr=${CYCLE_INTERVAL}, + suffix='${RADMON_SUFFIX}', + gesanl='${dtype}', + little_endian=${LITTLE_ENDIAN}, + netcdf=${netcdf_boolean}, + / +EOF + startmsg + ./${bcoef_exec} < input >>${pgmout} 2>>errfile + export err=$?; err_chk + if [[ $err -ne 0 ]]; then + fail=$(expr $fail + 1) + fi + + +#------------------------------------------------------------------- +# move data, control, and stdout files to $TANKverf_rad and compress +# + + if [[ -s ${bcoef_file} ]]; then + ${COMPRESS} ${bcoef_file} + fi + + if [[ -s ${bcoef_ctl} ]]; then + ${COMPRESS} ${bcoef_ctl} + fi + + + done # dtype in $gesanl loop + done # type in $SATYPE loop + + + ${USHradmon}/rstprod.sh + + if compgen -G "bcoef*.ieee_d* bcoef*.ctl*" > /dev/null; then + tar_file=radmon_bcoef.tar + tar -cf $tar_file bcoef*.ieee_d* bcoef*.ctl* + ${COMPRESS} ${tar_file} + mv $tar_file.${Z} ${TANKverf_rad} + + if [[ $RAD_AREA = "rgn" ]]; then + cwd=$(pwd) + cd ${TANKverf_rad} + tar -xf ${tar_file}.${Z} + rm ${tar_file}.${Z} + cd ${cwd} + fi + fi + + if [[ $ctr -gt 0 && $fail -eq $ctr || $fail -gt $ctr ]]; then + err=5 + fi +fi + + +################################################################################ +# Post processing + +exit ${err} diff --git a/ush/radmon_verf_bcor.sh b/ush/radmon_verf_bcor.sh new file mode 100755 index 0000000000..c949363b9f --- /dev/null +++ b/ush/radmon_verf_bcor.sh @@ -0,0 +1,226 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: radmon_verf_bcor.sh +# Script description: Extract bias correction data from radiance diagnostic +# files. +# +# Author: Ed Safford Org: NP23 Date: 2012-02-02 +# +# Abstract: This script extracts bias correction related data from radiance +# diagnostic files (which are an output from GSI runs), storing the +# extracted data in small binary files. +# +# This script is a child script of exgdas_vrfyrad.sh.sms. The parent +# script opens and uncompresses the radiance diagnostic file and copies +# other supporting files into a temporary working directory. +# +# +# Usage: radmon_verf_bcor.sh PDATE +# +# Input script positional parameters: +# PDATE processing date +# yyyymmddcc format; required +# +# Imported Shell Variables: +# RADMON_SUFFIX data source suffix +# defauls to opr +# EXECradmon executable directory +# defaults to current directory +# RAD_AREA global or regional flag +# defaults to global +# TANKverf_rad data repository +# defaults to current directory +# SATYPE list of satellite/instrument sources +# defaults to none +# LITTLE_ENDIAN flag for little endian machine +# defaults to 0 (big endian) +# USE_ANL use analysis files as inputs in addition to +# the ges files. 
Default is 0 (ges only) +# +# Modules and files referenced: +# scripts : +# +# programs : $NCP +# $bcor_exec +# +# fixed data : none +# +# input data : $data_file +# +# output data: $bcor_file +# $bcor_ctl +# $pgmout +# +# Remarks: +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +#################################################################### + +# Command line arguments. +export PDATE=${1:-${PDATE:?}} + +# Directories +EXECradmon=${EXECradmon:-$(pwd)} +TANKverf_rad=${TANKverf_rad:-$(pwd)} + +# File names +pgmout=${pgmout:-${jlogfile}} +touch $pgmout + +# Other variables +RAD_AREA=${RAD_AREA:-glb} +SATYPE=${SATYPE:-} +LITTLE_ENDIAN=${LITTLE_ENDIAN:-0} +USE_ANL=${USE_ANL:-0} + +bcor_exec=radmon_bcor.x +err=0 + +netcdf_boolean=".false." +if [[ $RADMON_NETCDF -eq 1 ]]; then + netcdf_boolean=".true." +fi + +if [[ $USE_ANL -eq 1 ]]; then + gesanl="ges anl" +else + gesanl="ges" +fi + + +#-------------------------------------------------------------------- +# Copy extraction program to working directory + +$NCP ${EXECradmon}/${bcor_exec} ./${bcor_exec} + +if [[ ! -s ./${bcor_exec} ]]; then + err=6 +else + + +#-------------------------------------------------------------------- +# Run program for given time + + export pgm=${bcor_exec} + + iyy=$(echo $PDATE | cut -c1-4) + imm=$(echo $PDATE | cut -c5-6) + idd=$(echo $PDATE | cut -c7-8) + ihh=$(echo $PDATE | cut -c9-10) + + ctr=0 + fail=0 + touch "./errfile" + + for type in ${SATYPE}; do + + for dtype in ${gesanl}; do + + prep_step + + ctr=$(expr $ctr + 1) + + if [[ $dtype == "anl" ]]; then + data_file=${type}_anl.${PDATE}.ieee_d + bcor_file=bcor.${data_file} + ctl_file=${type}_anl.ctl + bcor_ctl=bcor.${ctl_file} + stdout_file=stdout.${type}_anl + bcor_stdout=bcor.${stdout_file} + input_file=${type}_anl + else + data_file=${type}.${PDATE}.ieee_d + bcor_file=bcor.${data_file} + ctl_file=${type}.ctl + bcor_ctl=bcor.${ctl_file} + stdout_file=stdout.${type} + bcor_stdout=bcor.${stdout_file} + input_file=${type} + fi + + if [[ -f input ]]; then rm input; fi + + # Check for 0 length input file here and avoid running + # the executable if $input_file doesn't exist or is 0 bytes + # + if [[ -s $input_file ]]; then + nchanl=-999 + +cat << EOF > input + &INPUT + satname='${type}', + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-720, + incr=6, + nchanl=${nchanl}, + suffix='${RADMON_SUFFIX}', + gesanl='${dtype}', + little_endian=${LITTLE_ENDIAN}, + rad_area='${RAD_AREA}', + netcdf=${netcdf_boolean}, + / +EOF + + startmsg + ./${bcor_exec} < input >> ${pgmout} 2>>errfile + export err=$?; err_chk + if [[ $? -ne 0 ]]; then + fail=$(expr $fail + 1) + fi + + +#------------------------------------------------------------------- +# move data, control, and stdout files to $TANKverf_rad and compress +# + + if [[ -s ${bcor_file} ]]; then + ${COMPRESS} ${bcor_file} + fi + + if [[ -s ${bcor_ctl} ]]; then + ${COMPRESS} ${bcor_ctl} + fi + + fi + done # dtype in $gesanl loop + done # type in $SATYPE loop + + + ${USHradmon}/rstprod.sh + tar_file=radmon_bcor.tar + + if compgen -G "bcor*.ieee_d* bcor*.ctl*" > /dev/null; then + tar -cf $tar_file bcor*.ieee_d* bcor*.ctl* + ${COMPRESS} ${tar_file} + mv $tar_file.${Z} ${TANKverf_rad}/. 
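+      # For regional (rgn) runs the tarball that was just moved to
+      # ${TANKverf_rad} is immediately unpacked there and then removed,
+      # leaving the individual bcor files in place (presumably for plotting
+      # utilities that expect loose files rather than a tar archive).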
+ + if [[ $RAD_AREA = "rgn" ]]; then + cwd=$(pwd) + cd ${TANKverf_rad} + tar -xf ${tar_file}.${Z} + rm ${tar_file}.${Z} + cd ${cwd} + fi + fi + + if [[ $ctr -gt 0 && $fail -eq $ctr || $fail -gt $ctr ]]; then + err=7 + fi +fi + +################################################################################ +# Post processing + +exit ${err} + diff --git a/ush/radmon_verf_time.sh b/ush/radmon_verf_time.sh new file mode 100755 index 0000000000..f90d0945bb --- /dev/null +++ b/ush/radmon_verf_time.sh @@ -0,0 +1,567 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: radmon_verf_time.sh +# Script description: Extract time data from radiance diagnostic files, +# perform data integrity checks. +# +# Author: Ed Safford Org: NP23 Date: 2012-02-02 +# +# Abstract: This script extracts time related data from radiance diagnostic +# files (which are an output from GSI runs), storing the extracted +# data in small binary files. Data integrity checks are performed +# on the data and mail messages are sent if potential errors are +# detected. +# +# This script is a child script of exgdas_vrfyrad.sh.sms. The parent +# script opens and uncompresses the radiance diagnostic file and copies +# other supporting files into a temporary working directory. +# +# +# Usage: radmon_verf_time.sh PDATE +# +# Input script positional parameters: +# PDATE processing date +# yyyymmddcc format; required +# +# Imported Shell Variables: +# DO_DATA_RPT switch to build the data report +# defaults to 1 (on) +# RADMON_SUFFIX data source suffix +# defauls to opr +# EXECradmon executable directory +# defaults to current directory +# FIXgdas fixed data directory +# defaults to current directory +# RAD_AREA global or regional flag +# defaults to global +# TANKverf_rad data repository +# defaults to current directory +# SATYPE list of satellite/instrument sources +# defaults to none +# VERBOSE Verbose flag (YES or NO) +# defaults to NO +# LITTLE_ENDIAN flag for little endian machine +# defaults to 0 (big endian) +# USE_ANL use analysis files as inputs in addition to +# the ges files. Default is 0 (ges only) +# +# Modules and files referenced: +# scripts : +# +# programs : $NCP +# $time_exec +# +# fixed data : gdas_radmon_base.tar +# +# input data : $data_file +# +# output data: $time_file +# $time_ctl +# $pgmout +# $bad_pen +# $bad_chan +# $report +# $diag_report +# +# +# Remarks: +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +#################################################################### + +# Command line arguments. +export PDATE=${1:-${PDATE:?}} + +# Directories +FIXgdas=${FIXgdas:-$(pwd)} +EXECradmon=${EXECradmon:-$(pwd)} +TANKverf_rad=${TANKverf_rad:-$(pwd)} + +# File names +#pgmout=${pgmout:-${jlogfile}} +#touch $pgmout + +radmon_err_rpt=${radmon_err_rpt:-${USHradmon}/radmon_err_rpt.sh} +base_file=${base_file:-$FIXgdas/gdas_radmon_base.tar} +report=report.txt +disclaimer=disclaimer.txt + +diag_report=diag_report.txt +diag_hdr=diag_hdr.txt +diag=diag.txt + +obs_err=obs_err.txt +obs_hdr=obs_hdr.txt +pen_err=pen_err.txt +pen_hdr=pen_hdr.txt + +chan_err=chan_err.txt +chan_hdr=chan_hdr.txt +count_hdr=count_hdr.txt +count_err=count_err.txt + +netcdf_boolean=".false." +if [[ $RADMON_NETCDF -eq 1 ]]; then + netcdf_boolean=".true." 
+fi + +DO_DATA_RPT=${DO_DATA_RPT:-1} +RADMON_SUFFIX=${RADMON_SUFFIX:-opr} +RAD_AREA=${RAD_AREA:-glb} +REGIONAL_RR=${REGIONAL_RR:-0} +rgnHH=${rgnHH:-} +rgnTM=${rgnTM:-} +SATYPE=${SATYPE:-} +VERBOSE=${VERBOSE:-NO} +LITTLE_ENDIAN=${LITTLE_ENDIAN:-0} + +time_exec=radmon_time.x +USE_ANL=${USE_ANL:-0} +err=0 + +if [[ $USE_ANL -eq 1 ]]; then + gesanl="ges anl" +else + gesanl="ges" +fi + + +#-------------------------------------------------------------------- +# Copy extraction program and base files to working directory +#------------------------------------------------------------------- +$NCP ${EXECradmon}/${time_exec} ./ +if [[ ! -s ./${time_exec} ]]; then + err=8 +fi + +iyy=$(echo $PDATE | cut -c1-4) +imm=$(echo $PDATE | cut -c5-6) +idd=$(echo $PDATE | cut -c7-8) +ihh=$(echo $PDATE | cut -c9-10) +cyc=$ihh +CYCLE=$cyc + +local_base="local_base" +if [[ $DO_DATA_RPT -eq 1 ]]; then + + if [[ -e ${base_file}.${Z} ]]; then + $NCP ${base_file}.${Z} ./${local_base}.{Z} + ${UNCOMPRESS} ${local_base}.${Z} + else + $NCP ${base_file} ./${local_base} + fi + + if [[ ! -s ./${local_base} ]]; then + echo "RED LIGHT: local_base file not found" + else + echo "Confirming local_base file is good = ${local_base}" + tar -xf ./${local_base} + echo "local_base is untarred" + fi +fi + +if [[ $err -eq 0 ]]; then + ctr=0 + fail=0 + + export pgm=${time_exec} +#-------------------------------------------------------------------- +# Loop over each entry in SATYPE +#-------------------------------------------------------------------- + for type in ${SATYPE}; do + + if [[ ! -s ${type} ]]; then + echo "ZERO SIZED: ${type}" + continue + fi + + ctr=$(expr $ctr + 1) + + for dtype in ${gesanl}; do + + rm input + + if [[ $dtype == "anl" ]]; then + data_file=${type}_anl.${PDATE}.ieee_d + ctl_file=${type}_anl.ctl + time_ctl=time.${ctl_file} + else + data_file=${type}.${PDATE}.ieee_d + ctl_file=${type}.ctl + time_ctl=time.${ctl_file} + fi + + if [[ $REGIONAL_RR -eq 1 ]]; then + time_file=${rgnHH}.time.${data_file}.${rgnTM} + else + time_file=time.${data_file} + fi + +#-------------------------------------------------------------------- +# Run program for given satellite/instrument +#-------------------------------------------------------------------- + nchanl=-999 +cat << EOF > input + &INPUT + satname='${type}', + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-720, + incr=${CYCLE_INTERVAL}, + nchanl=${nchanl}, + suffix='${RADMON_SUFFIX}', + gesanl='${dtype}', + little_endian=${LITTLE_ENDIAN}, + rad_area='${RAD_AREA}', + netcdf=${netcdf_boolean}, + / +EOF + + ./${time_exec} < input >> stdout.${type} 2>>errfile + + if [[ $err -ne 0 ]]; then + fail=$(expr $fail + 1) + fi + +#------------------------------------------------------------------- +# move data, control, and stdout files to $TANKverf_rad and compress +#------------------------------------------------------------------- + cat stdout.${type} >> stdout.time + + if [[ -s ${time_file} ]]; then + ${COMPRESS} ${time_file} + fi + + if [[ -s ${time_ctl} ]]; then + ${COMPRESS} ${time_ctl} + fi + + done + done + + + ${USHradmon}/rstprod.sh + + if compgen -G "time*.ieee_d* time*.ctl*" > /dev/null; then + tar_file=radmon_time.tar + tar -cf $tar_file time*.ieee_d* time*.ctl* + ${COMPRESS} ${tar_file} + mv $tar_file.${Z} ${TANKverf_rad}/. 
+ + if [[ $RAD_AREA = "rgn" ]]; then + cwd=$(pwd) + cd ${TANKverf_rad} + tar -xf ${tar_file}.${Z} + rm ${tar_file}.${Z} + cd ${cwd} + fi + fi + + if [[ $ctr -gt 0 && $fail -eq $ctr || $fail -gt $ctr ]]; then + echo "fail, ctr = $fail, $ctr" + err=10 + fi + +fi + + + +#################################################################### +#------------------------------------------------------------------- +# Begin error analysis and reporting +#------------------------------------------------------------------- +#################################################################### + +if [[ $DO_DATA_RPT -eq 1 ]]; then + +#--------------------------- +# build report disclaimer +# + cat << EOF > ${disclaimer} + + +*********************** WARNING *************************** +THIS IS AN AUTOMATED EMAIL. REPLIES TO SENDER WILL NOT BE +RECEIVED. PLEASE DIRECT REPLIES TO edward.safford@noaa.gov +*********************** WARNING *************************** +EOF + + +#------------------------------------------------------------------- +# Check for missing diag files +# + tmp_satype="./tmp_satype.txt" + echo ${SATYPE} > ${tmp_satype} + ${USHradmon}/radmon_diag_ck.sh --rad ${radstat} --sat ${tmp_satype} --out ${diag} + + if [[ -s ${diag} ]]; then + cat << EOF > ${diag_hdr} + + Problem Reading Diagnostic File + + + Problems were encountered reading the diagnostic file for + the following sources: + +EOF + + cat ${diag_hdr} >> ${diag_report} + cat ${diag} >> ${diag_report} + + echo >> ${diag_report} + + rm ${diag_hdr} + fi + +#------------------------------------------------------------------- +# move warning notification to TANKverf +# + if [[ -s ${diag} ]]; then + lines=$(wc -l <${diag}) + echo "lines in diag = $lines" + + if [[ $lines -gt 0 ]]; then + cat ${diag_report} + cp ${diag} ${TANKverf_rad}/bad_diag.${PDATE} + else + rm ${diag_report} + fi + fi + + + + #---------------------------------------------------------------- + # Identify bad_pen and bad_chan files for this cycle and + # previous cycle + + bad_pen=bad_pen.${PDATE} + bad_chan=bad_chan.${PDATE} + low_count=low_count.${PDATE} + + qdate=$($NDATE -${CYCLE_INTERVAL} $PDATE) + pday=$(echo $qdate | cut -c1-8) + + prev_bad_pen=bad_pen.${qdate} + prev_bad_chan=bad_chan.${qdate} + prev_low_count=low_count.${qdate} + + prev_bad_pen=${TANKverf_radM1}/${prev_bad_pen} + prev_bad_chan=${TANKverf_radM1}/${prev_bad_chan} + prev_low_count=${TANKverf_radM1}/${prev_low_count} + + if [[ -s $bad_pen ]]; then + echo "pad_pen = $bad_pen" + fi + if [[ -s $prev_bad_pen ]]; then + echo "prev_pad_pen = $prev_bad_pen" + fi + + if [[ -s $bad_chan ]]; then + echo "bad_chan = $bad_chan" + fi + if [[ -s $prev_bad_chan ]]; then + echo "prev_bad_chan = $prev_bad_chan" + fi + if [[ -s $low_count ]]; then + echo "low_count = $low_count" + fi + if [[ -s $prev_low_count ]]; then + echo "prev_low_count = $prev_low_count" + fi + + do_pen=0 + do_chan=0 + do_cnt=0 + + if [[ -s $bad_pen && -s $prev_bad_pen ]]; then + do_pen=1 + fi + + if [[ -s $low_count && -s $prev_low_count ]]; then + do_cnt=1 + fi + + #-------------------------------------------------------------------- + # avoid doing the bad_chan report for REGIONAL_RR sources -- because + # they run hourly they often have 0 count channels for off-hour runs. 
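+  # Note: inside [[ ]] the -eq operator evaluates both operands as arithmetic
+  # expressions, so the bare name REGIONAL_RR below resolves to that
+  # variable's value (an unset variable evaluates to 0).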
+ # + if [[ -s $bad_chan && -s $prev_bad_chan && REGIONAL_RR -eq 0 ]]; then + do_chan=1 + fi + + #-------------------------------------------------------------------- + # Remove extra spaces in new bad_pen & low_count files + # + if [[ -s ${bad_pen} ]]; then + gawk '{$1=$1}1' $bad_pen > tmp.bad_pen + mv -f tmp.bad_pen $bad_pen + fi + if [[ -s ${low_count} ]]; then + gawk '{$1=$1}1' $low_count > tmp.low_count + mv -f tmp.low_count $low_count + fi + + echo " do_pen, do_chan, do_cnt = $do_pen, $do_chan, $do_cnt" + echo " diag_report = $diag_report " + if [[ $do_pen -eq 1 || $do_chan -eq 1 || $do_cnt -eq 1 || -s ${diag_report} ]]; then + + if [[ $do_pen -eq 1 ]]; then + + echo "calling radmon_err_rpt for pen" + ${radmon_err_rpt} ${prev_bad_pen} ${bad_pen} pen ${qdate} \ + ${PDATE} ${diag_report} ${pen_err} + fi + + if [[ $do_chan -eq 1 ]]; then + + echo "calling radmon_err_rpt for chan" + ${radmon_err_rpt} ${prev_bad_chan} ${bad_chan} chan ${qdate} \ + ${PDATE} ${diag_report} ${chan_err} + fi + + if [[ $do_cnt -eq 1 ]]; then + + echo "calling radmon_err_rpt for cnt" + ${radmon_err_rpt} ${prev_low_count} ${low_count} cnt ${qdate} \ + ${PDATE} ${diag_report} ${count_err} + fi + + #------------------------------------------------------------------- + # put together the unified error report with any obs, chan, and + # penalty problems and mail it + + if [[ -s ${obs_err} || -s ${pen_err} || -s ${chan_err} || -s ${count_err} || -s ${diag_report} ]]; then + + echo DOING ERROR REPORTING + + + cat << EOF > $report +Radiance Monitor warning report + + Net: ${RADMON_SUFFIX} + Run: ${RUN} + Cycle: $PDATE + +EOF + + if [[ -s ${diag_report} ]]; then + echo OUTPUTING DIAG_REPORT + cat ${diag_report} >> $report + fi + + if [[ -s ${chan_err} ]]; then + + echo OUTPUTING CHAN_ERR + + cat << EOF > ${chan_hdr} + + The following channels report 0 observational counts over the past two cycles: + + Satellite/Instrument Channel + ==================== ======= + +EOF + + cat ${chan_hdr} >> $report + cat ${chan_err} >> $report + + fi + + if [[ -s ${count_err} ]]; then + + cat << EOF > ${count_hdr} + + + + The following channels report abnormally low observational counts in the latest 2 cycles: + +Satellite/Instrument Obs Count Avg Count +==================== ========= ========= + +EOF + + cat ${count_hdr} >> $report + cat ${count_err} >> $report + fi + + + if [[ -s ${pen_err} ]]; then + + cat << EOF > ${pen_hdr} + + + Penalty values outside of the established normal range were found + for these sensor/channel/regions in the past two cycles: + + Questionable Penalty Values + ============ ======= ====== Cycle Penalty Bound + ----- ------- ----- +EOF + cat ${pen_hdr} >> $report + cat ${pen_err} >> $report + rm -f ${pen_hdr} + rm -f ${pen_err} + fi + + echo >> $report + cat ${disclaimer} >> $report + echo >> $report + fi + + #------------------------------------------------------------------- + # dump report to log file + # + if [[ -s ${report} ]]; then + lines=$(wc -l <${report}) + if [[ $lines -gt 2 ]]; then + cat ${report} + + $NCP ${report} ${TANKverf_rad}/warning.${PDATE} + fi + fi + + + fi + + #------------------------------------------------------------------- + # copy new bad_pen, bad_chan, and low_count files to $TANKverf_rad + # + if [[ -s ${bad_chan} ]]; then + mv ${bad_chan} ${TANKverf_rad}/. + fi + + if [[ -s ${bad_pen} ]]; then + mv ${bad_pen} ${TANKverf_rad}/. + fi + + if [[ -s ${low_count} ]]; then + mv ${low_count} ${TANKverf_rad}/. 
+ fi + + +fi + + for type in ${SATYPE}; do + rm -f stdout.${type} + done + +################################################################################ +#------------------------------------------------------------------- +# end error reporting section +#------------------------------------------------------------------- +################################################################################ + +################################################################################ +# Post processing + +exit ${err} diff --git a/ush/rstprod.sh b/ush/rstprod.sh new file mode 100755 index 0000000000..acac0340bb --- /dev/null +++ b/ush/rstprod.sh @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +#--------------------------------------------------------- +# rstprod.sh +# +# Restrict data from select sensors and satellites +#--------------------------------------------------------- + +# Restrict select sensors and satellites + +export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} +rlist="saphir abi_g16" +for rtype in $rlist; do + if compgen -G "*${rtype}*" > /dev/null; then + ${CHGRP_CMD} *${rtype}* + fi +done From 692e3fc891e32d7b7d1a55aca63bb43a3b8ad9b0 Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Tue, 23 Aug 2022 16:13:38 -0400 Subject: [PATCH 3/7] Add in stubs for aerosol DA tasks + bugfix for setup_expt where cycled and ATMA are used (#990) This PR adds in stubs for aerosol DA tasks (jobs/rocoto shell scripts), the ability for setup_expt and setup_xml to include three aerosol DA related tasks, and in the process of adding this capability, fixes a bug in which the combination of ATMA and cycled revealed that @property was being used incorrectly. Fixes #981 --- jobs/rocoto/aeroanlfinal.sh | 12 +++++++ jobs/rocoto/aeroanlinit.sh | 12 +++++++ jobs/rocoto/aeroanlrun.sh | 12 +++++++ jobs/rocoto/atmanalpost.sh | 4 ++- jobs/rocoto/atmanalprep.sh | 4 ++- jobs/rocoto/atmanalrun.sh | 4 ++- jobs/rocoto/atmensanalpost.sh | 4 ++- jobs/rocoto/atmensanalprep.sh | 4 ++- jobs/rocoto/atmensanalrun.sh | 4 ++- workflow/applications.py | 6 ++++ workflow/rocoto/workflow_tasks.py | 53 +++++++++++++++++++++++++++++-- workflow/rocoto/workflow_xml.py | 2 +- workflow/setup_expt.py | 2 +- 13 files changed, 112 insertions(+), 11 deletions(-) create mode 100755 jobs/rocoto/aeroanlfinal.sh create mode 100755 jobs/rocoto/aeroanlinit.sh create mode 100755 jobs/rocoto/aeroanlrun.sh diff --git a/jobs/rocoto/aeroanlfinal.sh b/jobs/rocoto/aeroanlfinal.sh new file mode 100755 index 0000000000..1906cc1e7f --- /dev/null +++ b/jobs/rocoto/aeroanlfinal.sh @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +. $HOMEgfs/ush/load_fv3gfs_modules.sh +status=$? +[[ $status -ne 0 ]] && exit $status + +############################################################### +echo "Do nothing for now" diff --git a/jobs/rocoto/aeroanlinit.sh b/jobs/rocoto/aeroanlinit.sh new file mode 100755 index 0000000000..1906cc1e7f --- /dev/null +++ b/jobs/rocoto/aeroanlinit.sh @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +. $HOMEgfs/ush/load_fv3gfs_modules.sh +status=$? 
+[[ $status -ne 0 ]] && exit $status + +############################################################### +echo "Do nothing for now" diff --git a/jobs/rocoto/aeroanlrun.sh b/jobs/rocoto/aeroanlrun.sh new file mode 100755 index 0000000000..1906cc1e7f --- /dev/null +++ b/jobs/rocoto/aeroanlrun.sh @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +. $HOMEgfs/ush/load_fv3gfs_modules.sh +status=$? +[[ $status -ne 0 ]] && exit $status + +############################################################### +echo "Do nothing for now" diff --git a/jobs/rocoto/atmanalpost.sh b/jobs/rocoto/atmanalpost.sh index 90a9b9bace..a54b7d5a7d 100755 --- a/jobs/rocoto/atmanalpost.sh +++ b/jobs/rocoto/atmanalpost.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules diff --git a/jobs/rocoto/atmanalprep.sh b/jobs/rocoto/atmanalprep.sh index e4b76c8407..c6df5287d7 100755 --- a/jobs/rocoto/atmanalprep.sh +++ b/jobs/rocoto/atmanalprep.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules diff --git a/jobs/rocoto/atmanalrun.sh b/jobs/rocoto/atmanalrun.sh index cebe478b7e..0768f2f55d 100755 --- a/jobs/rocoto/atmanalrun.sh +++ b/jobs/rocoto/atmanalrun.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules diff --git a/jobs/rocoto/atmensanalpost.sh b/jobs/rocoto/atmensanalpost.sh index ea6e490f20..f06f5aa01c 100755 --- a/jobs/rocoto/atmensanalpost.sh +++ b/jobs/rocoto/atmensanalpost.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules diff --git a/jobs/rocoto/atmensanalprep.sh b/jobs/rocoto/atmensanalprep.sh index 5ed434c6bf..19ec88f87c 100755 --- a/jobs/rocoto/atmensanalprep.sh +++ b/jobs/rocoto/atmensanalprep.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules diff --git a/jobs/rocoto/atmensanalrun.sh b/jobs/rocoto/atmensanalrun.sh index ddb3bb1432..5899eeaf40 100755 --- a/jobs/rocoto/atmensanalrun.sh +++ b/jobs/rocoto/atmensanalrun.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! 
/usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules diff --git a/workflow/applications.py b/workflow/applications.py index 1766c4071f..713c68727c 100644 --- a/workflow/applications.py +++ b/workflow/applications.py @@ -217,6 +217,9 @@ def _cycled_configs(self): if self.do_wafs: configs += ['wafs', 'wafsgrib2', 'wafsblending', 'wafsgcip', 'wafsgrib20p25', 'wafsblending0p25'] + if self.do_aero: + configs += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + return configs @property @@ -348,6 +351,9 @@ def _get_cycled_task_names(self): gdas_gfs_common_tasks_before_fcst += ['sfcanl', 'analcalc'] + if self.do_aero: + gdas_gfs_common_tasks_before_fcst += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + gldas_tasks = ['gldas'] wave_prep_tasks = ['waveinit', 'waveprep'] wave_bndpnt_tasks = ['wavepostbndpnt', 'wavepostbndpntbll'] diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py index c34605b52a..06321c3d7e 100644 --- a/workflow/rocoto/workflow_tasks.py +++ b/workflow/rocoto/workflow_tasks.py @@ -16,6 +16,7 @@ class Tasks: 'earc', 'ecen', 'echgres', 'ediag', 'efcs', 'eobs', 'eomg', 'epos', 'esfc', 'eupd', 'atmensanalprep', 'atmensanalrun', 'atmensanalpost', + 'aeroanlinit', 'aeroanlrun', 'aeroanlfinal', 'fcst', 'post', 'ocnpost', 'vrfy', 'metp', 'postsnd', 'awips', 'gempak', 'wafs', 'wafsblending', 'wafsblending0p25', @@ -450,6 +451,54 @@ def atmanalpost(self): return task + def aeroanlinit(self): + + suffix = self._base["SUFFIX"] + dump_suffix = self._base["DUMP_SUFFIX"] + gfs_cyc = self._base["gfs_cyc"] + dmpdir = self._base["DMPDIR"] + + deps = [] + dep_dict = {'type': 'metatask', 'name': 'gdaspost', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'&ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009{suffix}' + dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/{self.cdump}.t@Hz.updated.status.tm00.bufr_d' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('aeroanlinit') + task = create_wf_task('aeroanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + return task + + def aeroanlrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlinit'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('aeroanlrun') + task = create_wf_task('aeroanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def aeroanlfinal(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlrun'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'cycleexist', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('aeroanlfinal') + task = create_wf_task('aeroanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + def gldas(self): deps = [] @@ -470,7 +519,7 @@ def fcst(self): 'cycled': self._fcst_cycled} try: - task = fcst_map[self.app_config.mode] + task = fcst_map[self.app_config.mode]() except KeyError: raise 
NotImplementedError(f'{self.app_config.mode} is not a valid type.\n' + 'Currently supported forecast types are:\n' + @@ -478,7 +527,6 @@ def fcst(self): return task - @property def _fcst_forecast_only(self): dependencies = [] @@ -531,7 +579,6 @@ def _fcst_forecast_only(self): return task - @property def _fcst_cycled(self): dep_dict = {'type': 'task', 'name': f'{self.cdump}sfcanl'} diff --git a/workflow/rocoto/workflow_xml.py b/workflow/rocoto/workflow_xml.py index e0786964f9..440ff93db5 100644 --- a/workflow/rocoto/workflow_xml.py +++ b/workflow/rocoto/workflow_xml.py @@ -113,7 +113,7 @@ def _get_cycledefs_cycled(self): sdate = self._base['SDATE'].strftime('%Y%m%d%H%M') edate = self._base['EDATE'].strftime('%Y%m%d%H%M') interval = self._base.get('INTERVAL', '06:00:00') - strings = [f'\t{sdate} {edate} {interval}'] + strings = [f'\t{sdate} {edate} {interval}\n'] if self._app_config.gfs_cyc != 0: sdate_gfs = self._base['SDATE_GFS'].strftime('%Y%m%d%H%M') diff --git a/workflow/setup_expt.py b/workflow/setup_expt.py index 566e936d24..85145f6472 100755 --- a/workflow/setup_expt.py +++ b/workflow/setup_expt.py @@ -228,7 +228,7 @@ def input_args(): cycled.add_argument('--nens', help='number of ensemble members', type=int, required=False, default=20) cycled.add_argument('--app', help='UFS application', type=str, - choices=['ATM', 'ATMW'], required=False, default='ATM') + choices=['ATM', 'ATMW', 'ATMA'], required=False, default='ATM') # forecast only mode additional arguments forecasts.add_argument('--app', help='UFS application', type=str, choices=[ From aae25112286e70e2f662b15f230bba958dfb897b Mon Sep 17 00:00:00 2001 From: benwgreen <54944233+benwgreen@users.noreply.github.com> Date: Wed, 24 Aug 2022 09:17:24 -0600 Subject: [PATCH 4/7] Bugfix in arch.sh to remove hardwired "htar" (#992) To support "LOCALARCH", $TARCMD is used instead of hardwiring "htar". One line had a hardwired htar; this PR changes "htar" to "$TARCMD". --- jobs/rocoto/arch.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/jobs/rocoto/arch.sh b/jobs/rocoto/arch.sh index 05eb34e1ad..4e74f0ca26 100755 --- a/jobs/rocoto/arch.sh +++ b/jobs/rocoto/arch.sh @@ -213,7 +213,7 @@ if [ $CDUMP = "gfs" ]; then # Aerosols if [ $DO_AERO = "YES" ]; then for targrp in chem; do - htar -P -cvf $ATARDIR/$CDATE/${targrp}.tar $(cat $ARCH_LIST/${targrp}.txt) + $TARCMD -P -cvf $ATARDIR/$CDATE/${targrp}.tar $(cat $ARCH_LIST/${targrp}.txt) status=$? if [ $status -ne 0 -a $CDATE -ge $firstday ]; then echo "HTAR $CDATE ${targrp}.tar failed" From 3a8fc8ecd2fdbe8484dd882a512e765d3c70b307 Mon Sep 17 00:00:00 2001 From: Kate Friedman Date: Thu, 25 Aug 2022 13:49:24 -0400 Subject: [PATCH 5/7] Add missing "atmos" into job dependencies (#998) The "atmos" subfolder in the GDA was missing in the dependency path for the prep and atmanalprep jobs. Add it in for correctness. Updated dependencies in workflow/rocoto/workflow_tasks.py. 
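For reference, the dump-status dependency template changes from

  {dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/{self.cdump}.t@Hz.updated.status.tm00.bufr_d

to

  {dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.updated.status.tm00.bufr_d

in the prep, atmanalprep, aeroanlinit, and atmensanalprep task dependencies.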
Refs: #997
---
 workflow/rocoto/workflow_tasks.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py
index 06321c3d7e..4d2b980b94 100644
--- a/workflow/rocoto/workflow_tasks.py
+++ b/workflow/rocoto/workflow_tasks.py
@@ -245,7 +245,7 @@ def prep(self):
         data = f'&ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009{suffix}'
         dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'}
         deps.append(rocoto.add_dependency(dep_dict))
-        data = f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/{self.cdump}.t@Hz.updated.status.tm00.bufr_d'
+        data = f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.updated.status.tm00.bufr_d'
         dep_dict = {'type': 'data', 'data': data}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
@@ -406,7 +406,7 @@ def atmanalprep(self):
         data = f'&ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009{suffix}'
         dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'}
         deps.append(rocoto.add_dependency(dep_dict))
-        data = f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/{self.cdump}.t@Hz.updated.status.tm00.bufr_d'
+        data = f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.updated.status.tm00.bufr_d'
         dep_dict = {'type': 'data', 'data': data}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
@@ -464,7 +464,7 @@ def aeroanlinit(self):
         data = f'&ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009{suffix}'
         dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'}
         deps.append(rocoto.add_dependency(dep_dict))
-        data = f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/{self.cdump}.t@Hz.updated.status.tm00.bufr_d'
+        data = f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.updated.status.tm00.bufr_d'
         dep_dict = {'type': 'data', 'data': data}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
@@ -1032,7 +1032,7 @@ def atmensanalprep(self):
         data = f'&ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009{suffix}'
         dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'}
         deps.append(rocoto.add_dependency(dep_dict))
-        data = f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/{self.cdump}.t@Hz.updated.status.tm00.bufr_d'
+        data = f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.updated.status.tm00.bufr_d'
        dep_dict = {'type': 'data', 'data': data}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)

From 1f142296279730566610e10fb93bd8beb636af07 Mon Sep 17 00:00:00 2001
From: Walter Kolczynski - NOAA
Date: Thu, 25 Aug 2022 15:06:24 -0400
Subject: [PATCH 6/7] Fix preamble id (#996)

I was too clever by half when writing the preamble and used a check for
an argument that doesn't actually work. Using the more typical method
works correctly.
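A minimal standalone sketch of the check this change adopts (illustrative
only, not part of the patch): $# holds the number of positional parameters,
so (( $# > 0 )) is true exactly when the caller passed an argument.

    #! /usr/bin/env bash
    # Illustrative only: $# is the count of positional parameters,
    # so this branch runs only when the caller supplied an argument.
    if (( $# > 0 )); then
        id="(${1})"
    else
        id=""
    fi
    echo "id=${id}"

Saved as, say, demo.sh (a hypothetical name), "bash demo.sh post" prints
id=(post), while running it with no argument prints id= (empty).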
---
 ush/preamble.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ush/preamble.sh b/ush/preamble.sh
index bfa326f103..3effebb41f 100644
--- a/ush/preamble.sh
+++ b/ush/preamble.sh
@@ -19,7 +19,7 @@
 #
 #######
 set +x
-if [[ -v '1' ]]; then
+if (( $# > 0 )); then
     id="(${1})"
 else
     id=""

From 5429096ec944ba3133ff7b77ad0587c4b7b0ffc6 Mon Sep 17 00:00:00 2001
From: Kate Friedman
Date: Fri, 26 Aug 2022 11:40:46 -0400
Subject: [PATCH 7/7] Update UFS_UTILS tag to `ufs_utils_1_8_0` (#1001)

Update UFS_UTILS tag to ufs_utils_1_8_0

New tag contains:
- cleanup of decommissioned platforms
- adding support for WCOSS2
- regression test updates
- code updates to chgres_cube and emcsfc_snow2mdl
- some small script updates and cleanup
- change to the FNAISC file from CFSR.SEAICE.1982.2012.monthly.clim.grb to IMS-NIC.blended.ice.monthly.clim.grb

Refs #974
---
 Externals.cfg    | 2 +-
 sorc/checkout.sh | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/Externals.cfg b/Externals.cfg
index 221bfc55be..90c4494222 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -8,7 +8,7 @@ protocol = git
 required = True
 
 [UFS-Utils]
-hash = a2b0817
+hash = ufs_utils_1_8_0
 local_path = sorc/ufs_utils.fd
 repo_url = https://github.com/NOAA-EMC/UFS_UTILS.git
 protocol = git

diff --git a/sorc/checkout.sh b/sorc/checkout.sh
index ff3792f67a..a099a17943 100755
--- a/sorc/checkout.sh
+++ b/sorc/checkout.sh
@@ -152,7 +152,7 @@ mkdir -p ${logdir}
 # The checkout version should always be a speciifc commit (hash or tag), not a branch
 errs=0
 checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash}"; errs=$((errs + $?))
-checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "a2b0817" ; errs=$((errs + $?))
+checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "ufs_utils_1_8_0" ; errs=$((errs + $?))
 checkout "verif-global.fd" "https://github.com/NOAA-EMC/EMC_verif-global.git" "c267780" ; errs=$((errs + $?))

 if [[ $CHECKOUT_GSI == "YES" ]]; then