Skip to content

Commit

Permalink
Merge pull request #107 from HECBioSim/issue98
Browse files Browse the repository at this point in the history
Extended memory parameter to be used in all schedulers.
  • Loading branch information
jimboid committed Aug 3, 2018
2 parents 585dc21 + cba0666 commit 49735f7
Show file tree
Hide file tree
Showing 13 changed files with 218 additions and 5 deletions.
4 changes: 4 additions & 0 deletions longbow/schedulers/lsf.py
Expand Up @@ -98,6 +98,10 @@ def prepare(job):

jobfile.write("#BSUB -m " + job["lsf-cluster"] + "\n")

if job["memory"] is not "":

jobfile.write('#BSUB -R "rusage[mem=' + job["memory"] + 'G]"\n')

# Account to charge (if supplied).
if job["account"] is not "":

Expand Down
7 changes: 2 additions & 5 deletions longbow/schedulers/pbs.py
Expand Up @@ -131,16 +131,13 @@ def prepare(job):
# Number of mpi processes per node.
mpiprocs = cpn

# Memory size (used to select nodes with minimum memory).
memory = job["memory"]

tmp = "select=" + nodes + ":ncpus=" + ncpus + ":mpiprocs=" + mpiprocs

# If user has specified memory append the flag (not all machines support
# this).
if memory is not "":
if job["memory"] is not "":

tmp = tmp + ":mem=" + memory + "gb"
tmp = tmp + ":mem=" + job["memory"] + "gb"

# Write the resource requests
jobfile.write("#PBS -l " + tmp + "\n")
Expand Down
4 changes: 4 additions & 0 deletions longbow/schedulers/sge.py
Expand Up @@ -102,6 +102,10 @@ def prepare(job):

jobfile.write("#$ -l h_rt=" + job["maxtime"] + ":00\n")

if job["memory"] is not "":

jobfile.write("#$ -l h_vmem=" + job["memory"] + "G\n")

# Email user.
if job["email-address"] is not "":

Expand Down
4 changes: 4 additions & 0 deletions longbow/schedulers/slurm.py
Expand Up @@ -100,6 +100,10 @@ def prepare(job):
jobfile.write("#SBATCH " + job["accountflag"] + " " +
job["account"] + "\n")

if job["memory"] is not "":

jobfile.write("#SBATCH --mem=" + job["memory"] + "G" + "\n")

# Generic resource (if supplied)
if job["slurm-gres"] is not "":

Expand Down
4 changes: 4 additions & 0 deletions longbow/schedulers/soge.py
Expand Up @@ -103,6 +103,10 @@ def prepare(job):

jobfile.write("#$ -l h_rt=" + job["maxtime"] + ":00\n")

if job["memory"] is not "":

jobfile.write("#$ -l h_vmem=" + job["memory"] + "G\n")

# Email user.
if job["email-address"] is not "":

Expand Down
10 changes: 10 additions & 0 deletions tests/standards/lsf_submitfiles/case9.txt
@@ -0,0 +1,10 @@
#!/bin/bash --login
#BSUB -J testjob
#BSUB -q debug
#BSUB -R "rusage[mem=10G]"
#BSUB -W 24:00
#BSUB -n 24

module load amber

mpiexec.hydra pmemd.MPI -O -i e.in -c e.min -p e.top -o e.out
9 changes: 9 additions & 0 deletions tests/standards/sge_submitfiles/case8.txt
@@ -0,0 +1,9 @@
#!/bin/bash --login
#$ -cwd -V
#$ -N testjob
#$ -q debug
#$ -l h_rt=24:00:00
#$ -l h_vmem=10G
module load amber

mpiexec pmemd.MPI -O -i e.in -c e.min -p e.top -o e.out
15 changes: 15 additions & 0 deletions tests/standards/slurm_submitfiles/case8.txt
@@ -0,0 +1,15 @@
#!/bin/bash --login
#SBATCH -J testjob
#SBATCH -p debug
#SBATCH --mem=10G
#SBATCH --gres=gpu:1
#SBATCH -n 24
#SBATCH -N 1
#SBATCH -t 24:00:00

ls /dir
cd /dir

module load amber

mpirun pmemd.MPI -O -i e.in -c e.min -p e.top -o e.out
12 changes: 12 additions & 0 deletions tests/standards/soge_submitfiles/case8.txt
@@ -0,0 +1,12 @@
#!/bin/bash --login
#$ -cwd -V
#$ -N testjob
#$ -q debug
#$ -l h_rt=24:00:00
#$ -l h_vmem=10G
#$ -l nodes=1
#$ -pe ib 12

module load amber

mpiexec pmemd.MPI -O -i e.in -c e.min -p e.top -o e.out
42 changes: 42 additions & 0 deletions tests/unit/schedulers_lsf/test_lsf_prepare.py
Expand Up @@ -55,6 +55,7 @@ def test_prepare_case1():
"localworkdir": "/tmp",
"lsf-cluster": "",
"maxtime": "24:00",
"memory": "",
"modules": "amber",
"queue": "debug",
"replicates": "1",
Expand Down Expand Up @@ -89,6 +90,7 @@ def test_prepare_case2():
"localworkdir": "/tmp",
"lsf-cluster": "",
"maxtime": "24:00",
"memory": "",
"modules": "amber",
"queue": "debug",
"replicates": "5",
Expand Down Expand Up @@ -121,6 +123,7 @@ def test_prepare_case3():
"localworkdir": "/tmp",
"lsf-cluster": "cluster1",
"maxtime": "24:00",
"memory": "",
"modules": "amber",
"queue": "debug",
"replicates": "1",
Expand Down Expand Up @@ -154,6 +157,7 @@ def test_prepare_case4():
"localworkdir": "/tmp",
"lsf-cluster": "",
"maxtime": "24:00",
"memory": "",
"modules": "amber",
"queue": "debug",
"replicates": "1",
Expand Down Expand Up @@ -187,6 +191,7 @@ def test_prepare_case5():
"localworkdir": "/tmp",
"lsf-cluster": "",
"maxtime": "24:00",
"memory": "",
"modules": "amber",
"queue": "debug",
"replicates": "1",
Expand Down Expand Up @@ -220,6 +225,7 @@ def test_prepare_case6():
"localworkdir": "/tmp",
"lsf-cluster": "",
"maxtime": "24:00",
"memory": "",
"modules": "amber",
"queue": "debug",
"replicates": "1",
Expand Down Expand Up @@ -253,6 +259,7 @@ def test_prepare_case7():
"localworkdir": "/tmp",
"lsf-cluster": "",
"maxtime": "24:00",
"memory": "",
"modules": "amber",
"queue": "debug",
"replicates": "1",
Expand Down Expand Up @@ -286,6 +293,7 @@ def test_prepare_case8():
"localworkdir": "/tmp",
"lsf-cluster": "",
"maxtime": "24:00",
"memory": "",
"modules": "amber",
"queue": "debug",
"replicates": "1",
Expand All @@ -299,3 +307,37 @@ def test_prepare_case8():
os.path.join(
os.getcwd(),
"tests/standards/lsf_submitfiles/case8.txt"), "rb").read()


def test_prepare_case9():

    """
    Test that a non-empty "memory" parameter produces the LSF
    '#BSUB -R "rusage[mem=<N>G]"' resource directive in the submit file.
    """

    job = {
        "account": "",
        "accountflag": "",
        "cores": "24",
        "executableargs": "pmemd.MPI -O -i e.in -c e.min -p e.top -o e.out",
        "handler": "mpiexec.hydra",
        "email-address": "",
        "email-flags": "",
        "jobname": "testjob",
        "localworkdir": "/tmp",
        "lsf-cluster": "",
        "maxtime": "24:00",
        "memory": "10",
        "modules": "amber",
        "queue": "debug",
        "replicates": "1",
        "scripts": "",
        "upload-include": "file1, file2"
    }

    prepare(job)

    # Use context managers so file handles are closed even if the
    # comparison below raises.
    with open("/tmp/submit.lsf", "rb") as genfile:
        generated = genfile.read()

    with open(os.path.join(
            os.getcwd(),
            "tests/standards/lsf_submitfiles/case9.txt"), "rb") as stdfile:
        expected = stdfile.read()

    assert generated == expected
37 changes: 37 additions & 0 deletions tests/unit/schedulers_sge/test_sge_prepare.py
Expand Up @@ -295,3 +295,40 @@ def test_prepare_case7():
os.path.join(
os.getcwd(),
"tests/standards/sge_submitfiles/case7.txt"), "rb").read()


def test_prepare_case8():

    """
    Test that a non-empty "memory" parameter produces the SGE
    '#$ -l h_vmem=<N>G' resource directive in the submit file.
    """

    job = {
        "account": "",
        "accountflag": "",
        "cluster": "",
        "cores": "1",
        "corespernode": "",
        "executableargs": "pmemd.MPI -O -i e.in -c e.min -p e.top -o e.out",
        "handler": "mpiexec",
        "email-address": "",
        "email-flags": "",
        "jobname": "testjob",
        "localworkdir": "/tmp",
        "maxtime": "24:00",
        "memory": "10",
        "modules": "amber",
        "queue": "debug",
        "replicates": "1",
        "scripts": "",
        "sge-peflag": "mpi",
        "sge-peoverride": "false",
        "upload-include": "file1, file2"
    }

    prepare(job)

    # Use context managers so file handles are closed even if the
    # comparison below raises.
    with open("/tmp/submit.sge", "rb") as genfile:
        generated = genfile.read()

    with open(os.path.join(
            os.getcwd(),
            "tests/standards/sge_submitfiles/case8.txt"), "rb") as stdfile:
        expected = stdfile.read()

    assert generated == expected
38 changes: 38 additions & 0 deletions tests/unit/schedulers_slurm/test_slurm_prepare.py
Expand Up @@ -302,3 +302,41 @@ def test_prepare_case7():
os.path.join(
os.getcwd(),
"tests/standards/slurm_submitfiles/case7.txt"), "rb").read()


def test_prepare_case8():

    """
    Test that a non-empty "memory" parameter produces the Slurm
    '#SBATCH --mem=<N>G' directive (alongside the gres directive)
    in the submit file.
    """

    job = {
        "account": "",
        "accountflag": "",
        "cluster": "",
        "cores": "24",
        "corespernode": "24",
        "executableargs": "pmemd.MPI -O -i e.in -c e.min -p e.top -o e.out",
        "handler": "mpirun",
        "email-address": "",
        "email-flags": "",
        "jobname": "testjob",
        "localworkdir": "/tmp",
        "maxtime": "24:00",
        "memory": "10",
        "modules": "amber",
        "queue": "debug",
        "replicates": "1",
        "scripts": "ls /dir, cd /dir",
        "slurm-gres": "gpu:1",
        "sge-peflag": "mpi",
        "sge-peoverride": "false",
        "upload-include": "file1, file2"
    }

    prepare(job)

    # Use context managers so file handles are closed even if the
    # comparison below raises.
    with open("/tmp/submit.slurm", "rb") as genfile:
        generated = genfile.read()

    with open(os.path.join(
            os.getcwd(),
            "tests/standards/slurm_submitfiles/case8.txt"), "rb") as stdfile:
        expected = stdfile.read()

    assert generated == expected
37 changes: 37 additions & 0 deletions tests/unit/schedulers_soge/test_soge_prepare.py
Expand Up @@ -295,3 +295,40 @@ def test_prepare_case7():
os.path.join(
os.getcwd(),
"tests/standards/soge_submitfiles/case7.txt"), "rb").read()


def test_prepare_case8():

    """
    Test that a non-empty "memory" parameter produces the SoGE
    '#$ -l h_vmem=<N>G' resource directive in the submit file.
    """

    job = {
        "account": "",
        "accountflag": "",
        "cluster": "",
        "cores": "12",
        "corespernode": "24",
        "executableargs": "pmemd.MPI -O -i e.in -c e.min -p e.top -o e.out",
        "handler": "mpiexec",
        "email-address": "",
        "email-flags": "",
        "jobname": "testjob",
        "localworkdir": "/tmp",
        "maxtime": "24:00",
        "memory": "10",
        "modules": "amber",
        "queue": "debug",
        "replicates": "1",
        "scripts": "",
        "sge-peflag": "mpi",
        "sge-peoverride": "false",
        "upload-include": "file1, file2"
    }

    prepare(job)

    # Use context managers so file handles are closed even if the
    # comparison below raises.
    with open("/tmp/submit.soge", "rb") as genfile:
        generated = genfile.read()

    with open(os.path.join(
            os.getcwd(),
            "tests/standards/soge_submitfiles/case8.txt"), "rb") as stdfile:
        expected = stdfile.read()

    assert generated == expected

0 comments on commit 49735f7

Please sign in to comment.