From 4a525bef3bbed2ea60a71f71b3740c82df125c36 Mon Sep 17 00:00:00 2001
From: AndrewEichmann-NOAA <58948505+AndrewEichmann-NOAA@users.noreply.github.com>
Date: Thu, 7 Mar 2024 16:12:31 -0500
Subject: [PATCH] Add global-workflow infrastructure for ocean analysis
 recentering task (#2299)

Adds jjob, rocoto script, config file, and basic `config.resources` entry
for ocean analysis recentering task

This PR is a dependency for further work on the associated issue within
global-workflow and GDASApp

Refs https://github.com/NOAA-EMC/GDASApp/issues/912
---
 env/HERA.env                          |  8 ++++
 env/ORION.env                         |  8 ++++
 jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN | 38 +++++++++++++++
 jobs/rocoto/ocnanalecen.sh            | 23 +++++++++
 parm/config/gfs/config.ocnanalecen    | 11 +++++
 parm/config/gfs/config.resources      | 67 +++++++++++++++++++--------
 sorc/link_workflow.sh                 |  4 ++
 7 files changed, 140 insertions(+), 19 deletions(-)
 create mode 100755 jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN
 create mode 100755 jobs/rocoto/ocnanalecen.sh
 create mode 100644 parm/config/gfs/config.ocnanalecen

diff --git a/env/HERA.env b/env/HERA.env
index 2029a69328..f55434e8d9 100755
--- a/env/HERA.env
+++ b/env/HERA.env
@@ -107,6 +107,14 @@ elif [[ "${step}" = "ocnanalchkpt" ]]; then
 
     export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalchkpt}"
 
+elif [[ "${step}" = "ocnanalecen" ]]; then
+
+    nth_max=$((npe_node_max / npe_node_ocnanalecen))
+
+    export NTHREADS_OCNANALECEN=${nth_ocnanalecen:-${nth_max}}
+    [[ ${NTHREADS_OCNANALECEN} -gt ${nth_max} ]] && export NTHREADS_OCNANALECEN=${nth_max}
+    export APRUN_OCNANALECEN="${launcher} -n ${npe_ocnanalecen} --cpus-per-task=${NTHREADS_OCNANALECEN}"
+
 elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then
 
     export MKL_NUM_THREADS=4
diff --git a/env/ORION.env b/env/ORION.env
index 692fa8ab66..17d0d24d97 100755
--- a/env/ORION.env
+++ b/env/ORION.env
@@ -118,6 +118,14 @@ elif [[ "${step}" = "ocnanalchkpt" ]]; then
     [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max}
     export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalchkpt} --cpus-per-task=${NTHREADS_OCNANAL}"
 
+elif [[ "${step}" = "ocnanalecen" ]]; then
+
+    nth_max=$((npe_node_max / npe_node_ocnanalecen))
+
+    export NTHREADS_OCNANALECEN=${nth_ocnanalecen:-${nth_max}}
+    [[ ${NTHREADS_OCNANALECEN} -gt ${nth_max} ]] && export NTHREADS_OCNANALECEN=${nth_max}
+    export APRUN_OCNANALECEN="${launcher} -n ${npe_ocnanalecen} --cpus-per-task=${NTHREADS_OCNANALECEN}"
+
 elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then
 
     export MKL_NUM_THREADS=4
diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN
new file mode 100755
index 0000000000..c4ad80c9e3
--- /dev/null
+++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN
@@ -0,0 +1,38 @@
+#!/bin/bash
+source "${HOMEgfs}/ush/preamble.sh"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalecen" -c "base ocnanal ocnanalecen"
+
+##############################################
+# Set variables used in the script
+##############################################
+
+##############################################
+# Begin JOB SPECIFIC work
+##############################################
+
+###############################################################
+# Run relevant script
+
+EXSCRIPT=${GDASOCNCENPY:-${HOMEgfs}/scripts/exgdas_global_marine_analysis_ecen.py}
+${EXSCRIPT}
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+##############################################
+# End JOB SPECIFIC work
+##############################################
+
+##############################################
+# Final processing
+##############################################
+if [[ -e "${pgmout}" ]] ; then
+  cat "${pgmout}"
+fi
+
+##########################################
+# Remove the Temporary working directory
+##########################################
+cd "${DATAROOT}" || exit 1
+[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}"
+
+exit 0
diff --git a/jobs/rocoto/ocnanalecen.sh b/jobs/rocoto/ocnanalecen.sh
new file mode 100755
index 0000000000..c5fdbbbf32
--- /dev/null
+++ b/jobs/rocoto/ocnanalecen.sh
@@ -0,0 +1,23 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+# Source UFSDA workflow modules
+. "${HOMEgfs}/ush/load_ufsda_modules.sh"
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+export job="ocnanalecen"
+export jobid="${job}.$$"
+
+###############################################################
+# Setup Python path for GDASApp ush
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/sorc/gdas.cd/ush"
+export PYTHONPATH
+
+###############################################################
+# Execute the JJOB
+"${HOMEgfs}"/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN
+status=$?
+exit "${status}"
diff --git a/parm/config/gfs/config.ocnanalecen b/parm/config/gfs/config.ocnanalecen
new file mode 100644
index 0000000000..b64c2bcf62
--- /dev/null
+++ b/parm/config/gfs/config.ocnanalecen
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+########## config.ocnanalecen ##########
+# Ocn Analysis specific
+
+echo "BEGIN: config.ocnanalecen"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" ocnanalecen
+
+echo "END: config.ocnanalecen"
diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources
index 7e1f66cb04..a78bdb1384 100644
--- a/parm/config/gfs/config.resources
+++ b/parm/config/gfs/config.resources
@@ -23,7 +23,7 @@ if (( $# != 1 )); then
     echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt"
     echo "wavegempak waveawipsbulls waveawipsgridded"
     echo "postsnd awips gempak npoess"
-    echo "ocnanalprep prepoceanobs ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy"
+    echo "ocnanalprep prepoceanobs ocnanalbmat ocnanalrun ocnanalecen ocnanalchkpt ocnanalpost ocnanalvrfy"
     exit 1
 fi
 
@@ -356,12 +356,12 @@
 
   "ocnanalbmat")
     npes=16
-    case ${CASE} in
-      "C384") npes=480;;
-      "C96") npes=16;;
-      "C48") npes=16;;
+    case ${OCNRES} in
+      "025") npes=480;;
+      "050") npes=16;;
+      "500") npes=16;;
       *)
-        echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}"
+        echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${OCNRES}"
         exit 4
     esac
 
@@ -374,20 +374,21 @@
 
   "ocnanalrun")
     npes=16
-    case ${CASE} in
-      "C384")
-        npes=480
-        memory_ocnanalrun="128GB"
+    case ${OCNRES} in
+      "025")
+        npes=40
+        memory_ocnanalrun="96GB"
         ;;
-      "C96")
+      "050")
         npes=16
+        memory_ocnanalrun="96GB"
         ;;
-      "C48")
+      "500")
         npes=16
-        memory_ocnanalrun="64GB"
+        memory_ocnanalrun="24GB"
         ;;
       *)
-        echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}"
+        echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${OCNRES}"
         exit 4
     esac
 
@@ -399,23 +400,51 @@
     export memory_ocnanalrun
     ;;
 
+  "ocnanalecen")
+    npes=16
+    case ${OCNRES} in
+      "025")
+        npes=40
+        memory_ocnanalecen="96GB"
+        ;;
+      "050")
+        npes=16
+        memory_ocnanalecen="96GB"
+        ;;
+      "500")
+        npes=16
+        memory_ocnanalecen="24GB"
+        ;;
+      *)
+        echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${OCNRES}"
+        exit 4
+    esac
+
+    export wtime_ocnanalecen="00:10:00"
+    export npe_ocnanalecen=${npes}
+    export nth_ocnanalecen=1
+    export is_exclusive=True
+    export npe_node_ocnanalecen=$(( npe_node_max / nth_ocnanalecen ))
+    export memory_ocnanalecen
+    ;;
+
   "ocnanalchkpt")
     export wtime_ocnanalchkpt="00:10:00"
     export npe_ocnanalchkpt=1
     export nth_ocnanalchkpt=1
     export npe_node_ocnanalchkpt=$(( npe_node_max / nth_ocnanalchkpt ))
-    case ${CASE} in
-      "C384")
+    case ${OCNRES} in
+      "025")
         memory_ocnanalchkpt="128GB"
         npes=40;;
-      "C96")
+      "050")
         memory_ocnanalchkpt="32GB"
         npes=16;;
-      "C48")
+      "500")
         memory_ocnanalchkpt="32GB"
         npes=8;;
       *)
-        echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}"
+        echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${OCNRES}"
         exit 4
     esac
     export npe_ocnanalchkpt=${npes}
diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh
index 97af110077..2b54f3ea10 100755
--- a/sorc/link_workflow.sh
+++ b/sorc/link_workflow.sh
@@ -323,6 +323,10 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then
     "fv3jedi_enshofx.x" \
     "fv3jedi_hofx_nomodel.x" \
     "fv3jedi_testdata_downloader.py" \
+    "gdas_ens_handler.x" \
+    "gdas_incr_handler.x" \
+    "gdas_obsprovider2ioda.x" \
+    "gdas_socahybridweights.x" \
     "soca_convertincrement.x" \
     "soca_error_covariance_training.x" \
     "soca_setcorscales.x" \
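
Note for reviewers: the minimal sketch below is not part of the patch; it traces how the ocnanalecen resources exported by config.resources are consumed by the new env/HERA.env and env/ORION.env blocks to build the MPI launch command. The values npe_node_max=40 and launcher="srun" are assumed here purely for illustration.

#! /usr/bin/env bash
# Illustrative resource-flow sketch; assumed example values, not workflow code.
npe_node_max=40    # assumed cores per node
launcher="srun"    # assumed MPI launcher

# As exported by config.resources for ocnanalecen at OCNRES=025
npe_ocnanalecen=40
nth_ocnanalecen=1
npe_node_ocnanalecen=$(( npe_node_max / nth_ocnanalecen ))   # 40 tasks per node

# Mirrors the "ocnanalecen" branch added to the env files
nth_max=$(( npe_node_max / npe_node_ocnanalecen ))           # 1
NTHREADS_OCNANALECEN=${nth_ocnanalecen:-${nth_max}}
[[ ${NTHREADS_OCNANALECEN} -gt ${nth_max} ]] && NTHREADS_OCNANALECEN=${nth_max}
APRUN_OCNANALECEN="${launcher} -n ${npe_ocnanalecen} --cpus-per-task=${NTHREADS_OCNANALECEN}"

echo "${APRUN_OCNANALECEN}"   # -> srun -n 40 --cpus-per-task=1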