Skip to content

Commit

Permalink
Merge pull request #188 from mkavulich/run_on_cheyenne
Browse files Browse the repository at this point in the history
Running on Cheyenne
  • Loading branch information
mkavulich committed Apr 13, 2020
2 parents 7ee88a8 + 395a20b commit 01e6252
Show file tree
Hide file tree
Showing 19 changed files with 217 additions and 159 deletions.
2 changes: 1 addition & 1 deletion jobs/JREGIONAL_MAKE_ICS
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,7 @@ case "$MACHINE" in
;;
#
"CHEYENNE")
#
APRUN="mpirun -np ${NPROCS}"
;;
#
esac
Expand Down
2 changes: 1 addition & 1 deletion jobs/JREGIONAL_MAKE_LBCS
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ case "$MACHINE" in
;;
#
"CHEYENNE")
#
APRUN="mpirun -np ${NPROCS}"
;;
#
esac
Expand Down
5 changes: 5 additions & 0 deletions modulefiles/tasks/cheyenne/get_extrn_ics
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
#%Module#####################################################
## Module file intentionally blank for Cheyenne
#############################################################


5 changes: 5 additions & 0 deletions modulefiles/tasks/cheyenne/get_extrn_lbcs
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
#%Module#####################################################
## Module file intentionally blank for Cheyenne
#############################################################


9 changes: 9 additions & 0 deletions modulefiles/tasks/cheyenne/make_grid
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
#%Module#####################################################

module purge

module load ncarenv/1.3
module load intel/19.0.2
module load ncarcompilers/0.5.0
module load netcdf/4.6.3

2 changes: 2 additions & 0 deletions modulefiles/tasks/cheyenne/make_ics.local
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
#%Module#####################################################

2 changes: 2 additions & 0 deletions modulefiles/tasks/cheyenne/make_lbcs.local
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
#%Module#####################################################

9 changes: 9 additions & 0 deletions modulefiles/tasks/cheyenne/make_orog.hardcoded
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
#%Module#####################################################

module purge

module load ncarenv/1.3
module load intel/19.0.2
module load ncarcompilers/0.5.0
module load netcdf/4.6.3

5 changes: 5 additions & 0 deletions scripts/exregional_make_grid.sh
Original file line number Diff line number Diff line change
Expand Up @@ -157,6 +157,11 @@ case $MACHINE in
ulimit -a
;;

"CHEYENNE")

export APRUN="time"
export topo_dir="/glade/p/ral/jntp/UFS_CAM/fix/fix_orog"
;;

esac
#
Expand Down
28 changes: 6 additions & 22 deletions scripts/exregional_make_orog.sh
Original file line number Diff line number Diff line change
Expand Up @@ -118,27 +118,6 @@ case $MACHINE in
;;


"THEIA")
#
{ save_shell_opts; set +x; } > /dev/null 2>&1

. /apps/lmod/lmod/init/sh
module purge
module load intel/16.1.150
module load impi
module load hdf5/1.8.14
module load netcdf/4.3.0
module list

{ restore_shell_opts; } > /dev/null 2>&1

export APRUN="time"

ulimit -s unlimited
ulimit -a
;;


"HERA")
ulimit -s unlimited
ulimit -a
Expand All @@ -162,6 +141,11 @@ case $MACHINE in
;;


"CHEYENNE")
export APRUN="time"
export topo_dir="/glade/p/ral/jntp/UFS_CAM/fix/fix_orog"
;;

esac
#
#-----------------------------------------------------------------------
Expand Down Expand Up @@ -334,7 +318,7 @@ ${tmp_dir}" \
;;


"THEIA" | "HERA" | "JET" | "ODIN")
"CHEYENNE" | "HERA" | "JET" | "ODIN")
$APRUN "${exec_fp}" < "${input_redirect_fn}" || \
print_err_msg_exit "\
Call to executable (exec_fp) that generates the raw orography file returned
Expand Down
11 changes: 4 additions & 7 deletions scripts/exregional_make_sfc_climo.sh
Original file line number Diff line number Diff line change
Expand Up @@ -138,13 +138,6 @@ case $MACHINE in
APRUN=${APRUN:-"aprun -j 1 -n 6 -N 6"}
;;

"THEIA")
# Need to load intel/15.1.133. This and all other module loads should go into a module file.
module load intel/15.1.133
module list
APRUN="mpirun -np ${SLURM_NTASKS}"
;;

"HERA")
APRUN="srun"
;;
Expand All @@ -153,6 +146,10 @@ case $MACHINE in
APRUN="srun"
;;

"CHEYENNE")
APRUN="mpirun -np ${NPROCS}"
;;

*)
print_err_msg_exit "\
Run command has not been specified for this machine:
Expand Down
42 changes: 8 additions & 34 deletions scripts/exregional_run_fcst.sh
Original file line number Diff line number Diff line change
Expand Up @@ -108,40 +108,6 @@ case $MACHINE in
APRUN="mpirun -l -np ${PE_MEMBER01}"
;;
#
"THEIA")
#

if [ "${USE_CCPP}" = "TRUE" ]; then

# Need to change to the experiment directory to correctly load necessary
# modules for CCPP-version of FV3SAR in lines below
cd_vrfy ${EXPTDIR}

set +x
source ./module-setup.sh
module use $( pwd -P )
module load modules.fv3
module load contrib wrap-mpi
module list
set -x

else

. /apps/lmod/lmod/init/sh
module purge
module use /scratch4/NCEPDEV/nems/noscrub/emc.nemspara/soft/modulefiles
module load intel/16.1.150 impi/5.1.1.109 netcdf/4.3.0
module load contrib wrap-mpi
module list

fi

ulimit -s unlimited
ulimit -a
np=${SLURM_NTASKS}
APRUN="mpirun -np ${np}"
;;
#
"HERA")
ulimit -s unlimited
ulimit -a
Expand All @@ -165,6 +131,14 @@ case $MACHINE in
APRUN="srun -n ${PE_MEMBER01}"
;;
#
"CHEYENNE")
#
module list

APRUN="mpirun -np ${NPROCS}"
LD_LIBRARY_PATH="${UFS_WTHR_MDL_DIR}/FV3/ccpp/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"
;;
#
esac
#
#-----------------------------------------------------------------------
Expand Down
20 changes: 7 additions & 13 deletions scripts/exregional_run_post.sh
Original file line number Diff line number Diff line change
Expand Up @@ -112,19 +112,6 @@ case $MACHINE in
;;


"THEIA")
{ save_shell_opts; set +x; } > /dev/null 2>&1
module purge
module load intel
module load impi
module load netcdf
module load contrib wrap-mpi
{ restore_shell_opts; } > /dev/null 2>&1
np=${SLURM_NTASKS}
APRUN="mpirun -np ${np}"
;;


"HERA")
# export NDATE=/scratch3/NCEPDEV/nwprod/lib/prod_util/v1.1.0/exec/ndate
APRUN="srun"
Expand All @@ -141,6 +128,13 @@ case $MACHINE in
;;


"CHEYENNE")
module list

APRUN="mpirun -np ${NPROCS}"
;;


esac
#
#-----------------------------------------------------------------------
Expand Down
15 changes: 15 additions & 0 deletions ush/config_defaults.sh
Original file line number Diff line number Diff line change
Expand Up @@ -54,26 +54,41 @@ RUN_ENVIR="nco"
# or is set to an empty string, it will be (re)set to a machine-dependent
# value.
#
# QUEUE_DEFAULT_TAG:
# The rocoto xml tag to use for specifying the default queue. For most
# platforms this should be "queue".
#
# QUEUE_HPSS:
# The queue to which the tasks that get or create links to external model
# files [which are needed to generate initial conditions (ICs) and lateral
# boundary conditions (LBCs)] are submitted. If this is not set or is
# set to an empty string, it will be (re)set to a machine-dependent value.
#
# QUEUE_HPSS_TAG:
# The rocoto xml tag to use for specifying the HPSS queue. For slurm-based
# platforms this is typically "partition"; for others it may be "queue".
#
# QUEUE_FCST:
# The queue to which the task that runs a forecast is submitted. If this
# is not set or set to an empty string, it will be (re)set to a machine-
# dependent value.
#
# QUEUE_FCST_TAG:
# The rocoto xml tag to use for specifying the fcst queue. For most
# platforms this should be "queue".
#
# mach_doc_end
#
#-----------------------------------------------------------------------
#
MACHINE="BIG_COMPUTER"
ACCOUNT="project_name"
QUEUE_DEFAULT="batch_queue"
QUEUE_DEFAULT_TAG="queue"
QUEUE_HPSS="hpss_queue"
QUEUE_HPSS_TAG="partition"
QUEUE_FCST="production_queue"
QUEUE_FCST_TAG="queue"
#
#-----------------------------------------------------------------------
#
Expand Down
50 changes: 44 additions & 6 deletions ush/generate_FV3SAR_wflow.sh
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,7 @@ cp_vrfy ${TEMPLATE_XML_FP} ${WFLOW_XML_FP}
#-----------------------------------------------------------------------
#
PROC_RUN_FCST="${NUM_NODES}:ppn=${NCORES_PER_NODE}"
NPROCS_RUN_FCST=$(( ${NUM_NODES} * ${NCORES_PER_NODE} ))

FHR=( $( seq 0 1 ${FCST_LEN_HRS} ) )
i=0
Expand Down Expand Up @@ -147,10 +148,12 @@ fi
#
set_file_param "${WFLOW_XML_FP}" "ACCOUNT" "$ACCOUNT"
set_file_param "${WFLOW_XML_FP}" "SCHED" "$SCHED"
set_file_param "${WFLOW_XML_FP}" "QUEUE_DEFAULT" "${QUEUE_DEFAULT}"
set_file_param "${WFLOW_XML_FP}" "QUEUE_HPSS" "${QUEUE_HPSS}"
set_file_param "${WFLOW_XML_FP}" "QUEUE_FCST" "${QUEUE_FCST}"
set_file_param "${WFLOW_XML_FP}" "QUEUE_DEFAULT" "<${QUEUE_DEFAULT_TAG}>${QUEUE_DEFAULT}</${QUEUE_DEFAULT_TAG}>"
set_file_param "${WFLOW_XML_FP}" "QUEUE_HPSS" "<${QUEUE_HPSS_TAG}>${QUEUE_HPSS}</${QUEUE_HPSS_TAG}>"
set_file_param "${WFLOW_XML_FP}" "QUEUE_FCST" "<${QUEUE_FCST_TAG}>${QUEUE_FCST}</${QUEUE_FCST_TAG}>"
set_file_param "${WFLOW_XML_FP}" "NCORES_PER_NODE" "${NCORES_PER_NODE}"
set_file_param "${WFLOW_XML_FP}" "PROC_RUN_FCST" "${PROC_RUN_FCST}"
set_file_param "${WFLOW_XML_FP}" "NPROCS_RUN_FCST" "${NPROCS_RUN_FCST}"
#
# Directories.
#
Expand Down Expand Up @@ -298,8 +301,20 @@ cat "${MAKE_LBCS_TN}.local" >> "${MAKE_LBCS_TN}"
ln_vrfy -fs "${UFS_WTHR_MDL_DIR}/NEMS/src/conf/modules.nems" \
"${RUN_FCST_TN}"

ln_vrfy -fs "${EMC_POST_DIR}/modulefiles/post/v8.0.0-$machine" \

# Only some platforms build EMC_post using modules.
case $MACHINE in

"CHEYENNE")
print_info_msg "No post modulefile needed for $MACHINE"
;;

*)
ln_vrfy -fs "${EMC_POST_DIR}/modulefiles/post/v8.0.0-$machine" \
"${RUN_POST_TN}"
;;

esac

cd_vrfy -
#
Expand Down Expand Up @@ -585,13 +600,36 @@ The experiment directory is:
> EXPTDIR=\"$EXPTDIR\"
To launch the workflow, first ensure that you have a compatible version
"
case $MACHINE in

"CHEYENNE")
print_info_msg "To launch the workflow, first ensure that you have a compatible version
of rocoto in your \$PATH. On Cheyenne, version 1.3.1 has been pre-built; you can add it
to your \$PATH with one of the following commands, depending on your default shell:
bash:
> export PATH=\${PATH}:/glade/p/ral/jntp/tools/rocoto/rocoto-1.3.1/bin/
tcsh:
> setenv PATH \${PATH}:/glade/p/ral/jntp/tools/rocoto/rocoto-1.3.1/bin/
"
;;

*)
print_info_msg "To launch the workflow, first ensure that you have a compatible version
of rocoto loaded. For example, to load version 1.3.1 of rocoto, use
> module load rocoto/1.3.1
(This version has been tested on hera; later versions may also work but
have not been tested.) To launch the workflow, change location to the
have not been tested.)
"
;;

esac
print_info_msg "
To launch the workflow, change location to the
experiment directory (EXPTDIR) and issue the rocotorun command, as
follows:
Expand Down
Loading

0 comments on commit 01e6252

Please sign in to comment.