Skip to content

Commit

Permalink
Merge pull request #551 from mdecleir/testing
Browse files Browse the repository at this point in the history
Test for the write_sbatch_file tool
  • Loading branch information
karllark committed May 4, 2020
2 parents a637b33 + 9f2f408 commit 8929408
Show file tree
Hide file tree
Showing 2 changed files with 41 additions and 6 deletions.
36 changes: 36 additions & 0 deletions beast/tools/tests/test_write_sbatch_file.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
from tempfile import NamedTemporaryFile
from beast.tools.write_sbatch_file import write_sbatch_file

def test_sbatch_file():
    """Verify that write_sbatch_file creates an sbatch script with the
    expected contents.

    Writes a script into a temporary file using a representative set of
    options (job name, egress, queue, run time, memory, job array) and
    compares the resulting file byte-for-byte against the expected output.
    """
    temp_file = NamedTemporaryFile(suffix=".script")
    write_sbatch_file(temp_file.name, "./mastergrid_LMC/model_batch_jobs/create_physicsmodel_\"${SLURM_ARRAY_TASK_ID}\".job",
    "/pylon5/as5pi7p/lhagen", modules=["module load anaconda3", "source activate bdev"], job_name="LMCgrid", egress=True, queue="LM",
    stdout_file="/pylon5/as5pi7p/lhagen/mastergrid_LMC/model_batch_jobs/logs/%A_%a.out", run_time="35:00:00", mem="570GB", array=[1,9])

    # Read back the generated script; use a context manager so the file
    # handle is closed before the NamedTemporaryFile is cleaned up
    # (leaving it open leaks the descriptor and breaks cleanup on Windows).
    with open(temp_file.name) as f:
        content = f.read()

    expected = ("""#!/bin/bash
#SBATCH -J LMCgrid
#SBATCH -o /pylon5/as5pi7p/lhagen/mastergrid_LMC/model_batch_jobs/logs/%A_%a.out
#SBATCH -C EGRESS
#SBATCH -p LM
#SBATCH -t 35:00:00
#SBATCH --mem 570GB
#SBATCH --array=1-9
# move to appropriate directory
cd /pylon5/as5pi7p/lhagen
# Load any necessary modules.
# Loading modules in the script ensures a consistent environment.
module load anaconda3
source activate bdev
# Launch a job
./mastergrid_LMC/model_batch_jobs/create_physicsmodel_\"${SLURM_ARRAY_TASK_ID}\".job
""")

    assert content == expected, "The created sbatch file does not have the expected output."
11 changes: 5 additions & 6 deletions beast/tools/write_sbatch_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def write_sbatch_file(
directory : string
the directory that slurm should assume you're working in
modules : string or list of strings (default=['module load anaconda3','source activate bdev']
modules : string or list of strings (default=['module load anaconda3','source activate bdev']
modules to load before running job
job_name : string (default='beast')
Expand All @@ -56,18 +56,18 @@ def write_sbatch_file(
Maximum run time (hh:mm:ss). If your job is shorter, it's fine.
mem : string (default='128GB')
For bridges large, the memory to allocate to the job. If your job uses
For bridges large, the memory to allocate to the job. If your job uses
less memory than this, the full amount will still be charged.
array : list of two ints (default=None)
If set, #SBATCH --array=[0]-[1] will be included. In this case, make
If set, #SBATCH --array=[0]-[1] will be included. In this case, make
sure to use "${SLURM_ARRAY_TASK_ID}" somewhere in the job command.
"""

with open(file_name, "w") as f:

f.write("#!/bin/bash \n")
f.write("#!/bin/bash\n")
f.write("\n")

f.write("#SBATCH -J " + job_name + "\n")
Expand All @@ -76,7 +76,7 @@ def write_sbatch_file(
f.write("#SBATCH -o " + stdout_file + "\n")

if egress:
f.write("#SBATCH -C EGRESS \n")
f.write("#SBATCH -C EGRESS\n")

f.write("#SBATCH -p " + queue + "\n")
f.write("#SBATCH -t " + run_time + "\n")
Expand Down Expand Up @@ -106,7 +106,6 @@ def write_sbatch_file(
elif isinstance(job_command, list):
for item in job_command:
f.write(item + "\n")

f.write("\n")


Expand Down

0 comments on commit 8929408

Please sign in to comment.