Skip to content

Commit

Permalink
Merge pull request #58 from HopkinsIDD/init_files
Browse files Browse the repository at this point in the history
Init files
  • Loading branch information
shauntruelove committed Jul 17, 2023
2 parents 74ed274 + 514ef1d commit 1552575
Show file tree
Hide file tree
Showing 32 changed files with 4,458 additions and 1,149 deletions.
70 changes: 47 additions & 23 deletions batch/SLURM_inference_job.run
Original file line number Diff line number Diff line change
Expand Up @@ -28,34 +28,37 @@ which Rscript
export PATH=~/aws-cli/bin:$PATH
echo "***************** DONE LOADING ENVIRONMENT *****************"

# If running from zsh, this ensures space-separated words are split into multiple words, matching bash array behavior
setopt shwordsplit


echo "***************** FETCHING RESUME FILES *****************"
### In case of resume, download or move the right files
# Strip a single trailing slash; pure parameter expansion avoids the
# unquoted `echo | sed` pipeline (no fork, no word-splitting).
export LAST_JOB_OUTPUT="${LAST_JOB_OUTPUT%/}"
if [ -n "$LAST_JOB_OUTPUT" ]; then # -n Checks if the length of a string is nonzero --> if LAST_JOB_OUTPUT is not empty, the we download the output from the last job
if [ $FLEPI_BLOCK_INDEX -eq 1 ]; then # always true for slurm submissions
if [[ -n "$LAST_JOB_OUTPUT" ]]; then # -n Checks if the length of a string is nonzero --> if LAST_JOB_OUTPUT is not empty, the we download the output from the last job
if [[ $FLEPI_BLOCK_INDEX -eq 1 ]]; then # always true for slurm submissions
export RESUME_RUN_INDEX=$OLD_FLEPI_RUN_INDEX
echo "RESUME_DISCARD_SEEDING is set to $RESUME_DISCARD_SEEDING"
if [ $RESUME_DISCARD_SEEDING == "true" ]; then
export PARQUET_TYPES="spar snpi hpar hnpi"
if [[ $RESUME_DISCARD_SEEDING == "true" ]]; then
export PARQUET_TYPES="spar snpi hpar hnpi init"
else
export PARQUET_TYPES="seed spar snpi hpar hnpi"
export PARQUET_TYPES="seed spar snpi hpar hnpi init"
fi
else # if we are not in the first block, we need to resume from the last job, with seeding an all.
export RESUME_RUN_INDEX=$FLEPI_RUN_INDEX
export PARQUET_TYPES="seed spar snpi seir hpar hnpi hosp llik"
export PARQUET_TYPES="seed spar snpi seir hpar hnpi hosp llik init"
fi
for filetype in $PARQUET_TYPES
do
if [ $filetype == "seed" ]; then
if [[ $filetype == "seed" ]]; then
export extension="csv"
else
export extension="parquet"
fi
for liketype in "global" "chimeric"
do
export OUT_FILENAME=$(python -c "from gempyor import file_paths; print(file_paths.create_file_name('$FLEPI_RUN_INDEX','$FLEPI_PREFIX/$FLEPI_RUN_INDEX/$liketype/intermediate/%09d.'% $FLEPI_SLOT_INDEX,$FLEPI_BLOCK_INDEX-1,'$filetype','$extension'))")
if [ $FLEPI_BLOCK_INDEX -eq 1 ]; then
if [[ $FLEPI_BLOCK_INDEX -eq 1 ]]; then
export IN_FILENAME=$(python -c "from gempyor import file_paths; print(file_paths.create_file_name('$RESUME_RUN_INDEX','$FLEPI_PREFIX/$RESUME_RUN_INDEX/$liketype/final/',$FLEPI_SLOT_INDEX,'$filetype','$extension'))")
else
export IN_FILENAME=$OUT_FILENAME
Expand All @@ -69,18 +72,38 @@ if [ -n "$LAST_JOB_OUTPUT" ]; then # -n Checks if the length of a string is non
mkdir -p $OUT_FILENAME_DIR
cp $LAST_JOB_OUTPUT/$IN_FILENAME $OUT_FILENAME
fi
# Verify the copy actually produced the destination file; a missing *global*
# file is fatal (exit 2), a missing chimeric file is only logged.
if [[ -f $OUT_FILENAME ]]; then
    echo "Copy successful for file of type $filetype ($IN_FILENAME -> $OUT_FILENAME)"
else
    echo "Could not copy file of type $filetype ($IN_FILENAME -> $OUT_FILENAME)"
    # BUG FIX: was `[ $liktype -eq "global" ]` -- misspelled variable plus a
    # numeric comparison on strings, so this guard could never fire.
    if [[ "$liketype" == "global" ]]; then
        exit 2
    fi
fi
done
done
ls -ltr model_output
fi

# --- Continuation: seed this run's init files from a previous run's final seir output ---
if [[ $FLEPI_CONTINUATION == "TRUE" ]]; then
    echo "We are doing a continuation"
    # Destination: this run's intermediate init-type file for the current slot/block.
    # NOTE(review): $extension is inherited from the resume loop above and may be
    # unset when no resume ran; default to parquet -- TODO confirm intended type.
    export INIT_FILENAME=$(python -c "from gempyor import file_paths; print(file_paths.create_file_name('$FLEPI_RUN_INDEX','$FLEPI_PREFIX/$FLEPI_RUN_INDEX/global/intermediate/%09d.'% $FLEPI_SLOT_INDEX,$FLEPI_BLOCK_INDEX-1,'$FLEPI_CONTINUATION_FTYPE','${extension:-parquet}'))")
    # Source is always a seir file from the continuation run's final outputs.
    export IN_FILENAME=$(python -c "from gempyor import file_paths; print(file_paths.create_file_name('$FLEPI_CONTINUATION_RUN_ID','$FLEPI_PREFIX/$FLEPI_CONTINUATION_RUN_ID/global/final/',$FLEPI_SLOT_INDEX,'seir','${extension:-parquet}'))")
    if [[ $FLEPI_CONTINUATION_LOCATION == *"s3://"* ]]; then
        aws s3 cp --quiet $FLEPI_CONTINUATION_LOCATION/$IN_FILENAME $INIT_FILENAME
    else
        # cp does not create directories, so make the destination directory first.
        # BUG FIX: was `export $OUT_FILENAME_DIR=...` (extra `$`), which tried to
        # export a variable *named by the value* of OUT_FILENAME_DIR.
        export OUT_FILENAME_DIR="$(dirname "${INIT_FILENAME}")"
        mkdir -p $OUT_FILENAME_DIR
        cp $FLEPI_CONTINUATION_LOCATION/$IN_FILENAME $INIT_FILENAME
    fi
    if [[ -f $INIT_FILENAME ]]; then
        # BUG FIX: messages referenced stale $filetype left over from the resume loop.
        echo "CONTINUATION: Copy successful for file of type $FLEPI_CONTINUATION_FTYPE ($IN_FILENAME -> $INIT_FILENAME)"
    else
        echo "CONTINUATION: Could not copy file of type $FLEPI_CONTINUATION_FTYPE ($IN_FILENAME -> $INIT_FILENAME)"
    fi
    #Rscript $FLEPI_PATH/flepimop/main_scripts/seir_init_immuneladder.R --res_config config_SMH_R17_noBoo_lowIE_phase2_blk2.yml
    #Rscript $FLEPI_PATH/preprocessing/seir_init_immuneladder_r17phase3_preOm.R --res_config config_SMH_R17_noBoo_lowIE_phase2_blk2.yml
fi

ls -ltr model_output
echo "***************** DONE FETCHING RESUME FILES *****************"

echo "***************** RUNNING INFERENCE_MAIN.R *****************"
Expand All @@ -100,20 +123,20 @@ echo "Rscript $FLEPI_PATH/flepimop/main_scripts/inference_slot.R --config $CONFI
--python python
--rpath Rscript
--is-resume $RESUME_RUN # Is this run a resume
--is-interactive FALSE # Is this run an interactive run" > $LOG_FILE 2>&1 &
--is-interactive FALSE # Is this run an interactive run" #> $LOG_FILE 2>&1 &

Rscript $FLEPI_PATH/flepimop/main_scripts/inference_slot.R -p $FLEPI_PATH --config $CONFIG_PATH --run_id $FLEPI_RUN_INDEX --npi_scenarios $FLEPI_NPI_SCENARIOS --outcome_scenarios $FLEPI_OUTCOME_SCENARIOS --jobs 1 --iterations_per_slot $FLEPI_ITERATIONS_PER_SLOT --this_slot $FLEPI_SLOT_INDEX --this_block 1 --stoch_traj_flag $FLEPI_STOCHASTIC_RUN --is-resume $RESUME_RUN --is-interactive FALSE > $LOG_FILE 2>&1
Rscript $FLEPI_PATH/flepimop/main_scripts/inference_slot.R -p $FLEPI_PATH --config $CONFIG_PATH --run_id $FLEPI_RUN_INDEX --npi_scenarios $FLEPI_NPI_SCENARIOS --outcome_scenarios $FLEPI_OUTCOME_SCENARIOS --jobs 1 --iterations_per_slot $FLEPI_ITERATIONS_PER_SLOT --this_slot $FLEPI_SLOT_INDEX --this_block 1 --stoch_traj_flag $FLEPI_STOCHASTIC_RUN --is-resume $RESUME_RUN --is-interactive FALSE #> $LOG_FILE 2>&1
dvc_ret=$?
# Surface (but do not abort on) a non-zero exit from inference_slot.R.
if [[ $dvc_ret -ne 0 ]]; then
    echo "Error code returned from inference_slot.R: $dvc_ret"
fi
echo "***************** DONE RUNNING INFERENCE_SLOT.R *****************"


echo "***************** UPLOADING RESULT TO S3 (OR NOT) *****************"
## copy to s3 if necessary:
if [ $S3_UPLOAD == "true" ]; then
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar"
if [[ $S3_UPLOAD == "true" ]]; then
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar" "init"
do
export FILENAME=$(python -c "from gempyor import file_paths; print(file_paths.create_file_name('$FLEPI_RUN_INDEX','$FLEPI_PREFIX/$FLEPI_RUN_INDEX/chimeric/intermediate/%09d.'% $FLEPI_SLOT_INDEX,$FLEPI_BLOCK_INDEX,'$type','parquet'))")
aws s3 cp --quiet $FILENAME $S3_RESULTS_PATH/$FILENAME
Expand All @@ -128,12 +151,12 @@ if [ $S3_UPLOAD == "true" ]; then
export FILENAME=$(python -c "from gempyor import file_paths; print(file_paths.create_file_name('$FLEPI_RUN_INDEX','$FLEPI_PREFIX/$FLEPI_RUN_INDEX/global/intermediate/%09d.'% $FLEPI_SLOT_INDEX,$FLEPI_BLOCK_INDEX,'$type','csv'))")
aws s3 cp --quiet $FILENAME $S3_RESULTS_PATH/$FILENAME
done
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar" "memprof"
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar" "memprof" "init"
do
export FILENAME=$(python -c "from gempyor import file_paths; print(file_paths.create_file_name('$FLEPI_RUN_INDEX','$FLEPI_PREFIX/$FLEPI_RUN_INDEX/global/intermediate/%09d.'% $FLEPI_SLOT_INDEX,$FLEPI_BLOCK_INDEX,'$type','parquet'))")
aws s3 cp --quiet $FILENAME $S3_RESULTS_PATH/$FILENAME
done
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar" "memprof"
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar" "memprof" "init"
do
export FILENAME=$(python -c "from gempyor import file_paths; print(file_paths.create_file_name('$FLEPI_RUN_INDEX','$FLEPI_PREFIX/$FLEPI_RUN_INDEX/global/final/', $FLEPI_SLOT_INDEX,'$type','parquet'))")
aws s3 cp --quiet $FILENAME $S3_RESULTS_PATH/$FILENAME
Expand All @@ -149,7 +172,7 @@ echo "***************** DONE UPLOADING RESULT TO S3 (OR NOT) *****************"

# TODO: MV here ? what to do about integration_dump.pkl e.g ?
echo "***************** COPYING RESULTS TO RESULT DIRECTORY *****************"
# Copy chimeric intermediate outputs for this slot/block into the results directory.
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar" "init"
do
    export FILENAME=$(python -c "from gempyor import file_paths; print(file_paths.create_file_name('$FLEPI_RUN_INDEX','$FLEPI_PREFIX/$FLEPI_RUN_INDEX/chimeric/intermediate/%09d.'% $FLEPI_SLOT_INDEX,$FLEPI_BLOCK_INDEX,'$type','parquet'))")
    # BUG FIX: was `export $OUT_FILENAME_DIR=...` (extra `$`), which exports a
    # variable named by OUT_FILENAME_DIR's *value* instead of assigning it.
    export OUT_FILENAME_DIR="$(dirname "${FS_RESULTS_PATH}/${FILENAME}")"
    mkdir -p $OUT_FILENAME_DIR
    cp --parents $FILENAME $FS_RESULTS_PATH
done
# Copy global intermediate outputs (incl. memory profile and init files) to results.
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar" "memprof" "init"
do
    export FILENAME=$(python -c "from gempyor import file_paths; print(file_paths.create_file_name('$FLEPI_RUN_INDEX','$FLEPI_PREFIX/$FLEPI_RUN_INDEX/global/intermediate/%09d.'% $FLEPI_SLOT_INDEX,$FLEPI_BLOCK_INDEX,'$type','parquet'))")
    # BUG FIX: was `export $OUT_FILENAME_DIR=...` (extra `$`); assign, don't indirect.
    export OUT_FILENAME_DIR="$(dirname "${FS_RESULTS_PATH}/${FILENAME}")"
    mkdir -p $OUT_FILENAME_DIR
    cp --parents $FILENAME $FS_RESULTS_PATH
done
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar" "memprof"
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar" "memprof" "init"
do
export FILENAME=$(python -c "from gempyor import file_paths; print(file_paths.create_file_name('$FLEPI_RUN_INDEX','$FLEPI_PREFIX/$FLEPI_RUN_INDEX/global/final/', $FLEPI_SLOT_INDEX,'$type','parquet'))")
export $OUT_FILENAME_DIR="$(dirname "${FS_RESULTS_PATH}/${FILENAME}")"
Expand All @@ -204,5 +227,6 @@ echo "DONE EVERYTHING."
# --> THIS DOES NOT WORK
#mv slurm-$SLURM_ARRAY_JOB_ID_${SLURM_ARRAY_TASK_ID}.out $FS_RESULTS_PATH/slurm-$SLURM_ARRAY_JOB_ID_${SLURM_ARRAY_TASK_ID}.out

unsetopt shwordsplit

wait
8 changes: 4 additions & 4 deletions batch/SLURM_inference_runner.sh
Original file line number Diff line number Diff line change
Expand Up @@ -128,12 +128,12 @@ if [ $S3_UPLOAD == "true" ]; then
export FILENAME=$(python -c "from gempyor import file_paths; print(file_paths.create_file_name('$FLEPI_RUN_INDEX','$FLEPI_PREFIX/$FLEPI_RUN_INDEX/global/intermediate/%09d.'% $FLEPI_SLOT_INDEX,$FLEPI_BLOCK_INDEX,'$type','csv'))")
aws s3 cp --quiet $FILENAME $S3_RESULTS_PATH/$FILENAME
done
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar"
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar" "memprof"
do
export FILENAME=$(python -c "from gempyor import file_paths; print(file_paths.create_file_name('$FLEPI_RUN_INDEX','$FLEPI_PREFIX/$FLEPI_RUN_INDEX/global/intermediate/%09d.'% $FLEPI_SLOT_INDEX,$FLEPI_BLOCK_INDEX,'$type','parquet'))")
aws s3 cp --quiet $FILENAME $S3_RESULTS_PATH/$FILENAME
done
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar"
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar" "memprof"
do
export FILENAME=$(python -c "from gempyor import file_paths; print(file_paths.create_file_name('$FLEPI_RUN_INDEX','$FLEPI_PREFIX/$FLEPI_RUN_INDEX/global/final/', $FLEPI_SLOT_INDEX,'$type','parquet'))")
aws s3 cp --quiet $FILENAME $S3_RESULTS_PATH/$FILENAME
Expand Down Expand Up @@ -170,14 +170,14 @@ do
mkdir -p $OUT_FILENAME_DIR
cp --parents $FILENAME $FS_RESULTS_PATH
done
# Copy global intermediate outputs (incl. memory profile) to the results directory.
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar" "memprof"
do
    export FILENAME=$(python -c "from gempyor import file_paths; print(file_paths.create_file_name('$FLEPI_RUN_INDEX','$FLEPI_PREFIX/$FLEPI_RUN_INDEX/global/intermediate/%09d.'% $FLEPI_SLOT_INDEX,$FLEPI_BLOCK_INDEX,'$type','parquet'))")
    # BUG FIX: was `export $OUT_FILENAME_DIR=...` (extra `$`); assign, don't indirect.
    export OUT_FILENAME_DIR="$(dirname "${FS_RESULTS_PATH}/${FILENAME}")"
    mkdir -p $OUT_FILENAME_DIR
    cp --parents $FILENAME $FS_RESULTS_PATH
done
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar"
for type in "seir" "hosp" "llik" "spar" "snpi" "hnpi" "hpar" "memprof"
do
export FILENAME=$(python -c "from gempyor import file_paths; print(file_paths.create_file_name('$FLEPI_RUN_INDEX','$FLEPI_PREFIX/$FLEPI_RUN_INDEX/global/final/', $FLEPI_SLOT_INDEX,'$type','parquet'))")
export $OUT_FILENAME_DIR="$(dirname "${FS_RESULTS_PATH}/${FILENAME}")"
Expand Down
10 changes: 9 additions & 1 deletion batch/SLURM_postprocess_runner.run
Original file line number Diff line number Diff line change
Expand Up @@ -19,14 +19,17 @@ conda activate flepimop-env
which python
which Rscript

# aws cli to export plots (location according to instruction)
export PATH=~/aws-cli/bin:$PATH

# move all the slurm logs into the right folder:
# BUG FIX: `$SLURM_ARRAY_JOB_ID_` parses as a variable named SLURM_ARRAY_JOB_ID_
# (underscore is a valid name character), so the filename never matched; brace
# both expansions explicitly.
mv slurm-${SLURM_ARRAY_JOB_ID}_${SLURM_ARRAY_TASK_ID}.out $FS_RESULTS_PATH/slurm-${SLURM_ARRAY_JOB_ID}_${SLURM_ARRAY_TASK_ID}.out

curl \
-H "Title: $FLEPI_RUN_INDEX Done ✅" \
-H "Priority: urgent" \
-H "Tags: warning,snail" \
-d "TODO say how many failure and stuff" \
-d "Hopefully the results look alright" \
ntfy.sh/flepimop_alerts

# get the slack credentials
Expand All @@ -37,3 +40,8 @@ mkdir pplot

source $FLEPI_PATH/batch/postprocessing-scripts.sh

# Publish post-processing plots: always to the shared filesystem, and to S3 when enabled.
cp -R pplot $FS_RESULTS_PATH
if [[ $S3_UPLOAD == "true" ]]; then
    # BUG FIX: `aws s3 cp` of a directory requires --recursive; without it the
    # command errors out and nothing is uploaded.
    aws s3 cp --quiet --recursive pplot $S3_RESULTS_PATH/pplot
fi

0 comments on commit 1552575

Please sign in to comment.