Make memory limit tests less brittle (#511)
format_bytes has changed in dask 2021.4
lesteve committed Jul 22, 2021
1 parent dffe134 commit ccde944
Showing 6 changed files with 40 additions and 17 deletions.
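
The pattern applied in every file below is the same: hard-coded rendered sizes such as "--memory-limit 7.00GB" are replaced by a value computed at test time with the same dask helpers the worker CLI uses, so the assertions keep passing regardless of how the installed dask version renders bytes. A minimal sketch of the idea, assuming a hypothetical helper name expected_memory_limit that does not appear in the actual diff:

from dask.utils import format_bytes, parse_bytes

def expected_memory_limit(human_readable):
    # parse_bytes("7GB") -> 7000000000; format_bytes renders that number back
    # in whatever units the installed dask prefers (for example "6.52 GiB" on
    # dask >= 2021.4 versus "7.00 GB" on older releases). The space is stripped
    # to match how the value appears after --memory-limit in the generated job script.
    return format_bytes(parse_bytes(human_readable)).replace(" ", "")

# Usage mirroring the assertions added in the diff:
# assert f"--memory-limit {expected_memory_limit('7GB')}" in job_script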
4 changes: 3 additions & 1 deletion dask_jobqueue/tests/test_htcondor.py
@@ -5,6 +5,7 @@
from distributed import Client

import dask
from dask.utils import format_bytes, parse_bytes

from dask_jobqueue import HTCondorCluster

@@ -44,7 +45,8 @@ def test_job_script():
"{} -m distributed.cli.dask_worker tcp://".format(sys.executable)
in job_script
)
assert "--memory-limit 50.00MB" in job_script
formatted_bytes = format_bytes(parse_bytes("50MB")).replace(" ", "")
assert f"--memory-limit {formatted_bytes}" in job_script
assert "--nthreads 2" in job_script
assert "--nprocs 2" in job_script

11 changes: 7 additions & 4 deletions dask_jobqueue/tests/test_lsf.py
@@ -8,7 +8,7 @@
import dask
import pytest
from dask.distributed import Client
from distributed.utils import parse_bytes
from dask.utils import format_bytes, parse_bytes

from dask_jobqueue import LSFCluster, lsf

@@ -100,7 +100,8 @@ def test_job_script():
"{} -m distributed.cli.dask_worker tcp://".format(sys.executable)
in job_script
)
assert "--nthreads 2 --nprocs 4 --memory-limit 7.00GB" in job_script
formatted_bytes = format_bytes(parse_bytes("7GB")).replace(" ", "")
assert f"--nthreads 2 --nprocs 4 --memory-limit {formatted_bytes}" in job_script

with LSFCluster(
queue="general",
@@ -126,7 +127,8 @@ def test_job_script():
"{} -m distributed.cli.dask_worker tcp://".format(sys.executable)
in job_script
)
assert "--nthreads 2 --nprocs 4 --memory-limit 7.00GB" in job_script
formatted_bytes = format_bytes(parse_bytes("7GB")).replace(" ", "")
assert f"--nthreads 2 --nprocs 4 --memory-limit {formatted_bytes}" in job_script

with LSFCluster(
walltime="1:00",
@@ -149,7 +151,8 @@ def test_job_script():
"{} -m distributed.cli.dask_worker tcp://".format(sys.executable)
in job_script
)
assert "--nthreads 1 --memory-limit 16.00GB" in job_script
formatted_bytes = format_bytes(parse_bytes("16GB")).replace(" ", "")
assert f"--nthreads 1 --memory-limit {formatted_bytes}" in job_script


@pytest.mark.env("lsf")
16 changes: 11 additions & 5 deletions dask_jobqueue/tests/test_oar.py
@@ -1,7 +1,9 @@
import sys

from dask_jobqueue import OARCluster
import dask
from dask.utils import format_bytes, parse_bytes

from dask_jobqueue import OARCluster


def test_header():
@@ -40,7 +42,8 @@ def test_job_script():
job_script = cluster.job_script()
assert "#OAR" in job_script
assert "#OAR -n dask-worker" in job_script
assert "--memory-limit 7.00GB " in job_script
formatted_bytes = format_bytes(parse_bytes("7GB")).replace(" ", "")
assert f"--memory-limit {formatted_bytes}" in job_script
assert "#OAR -l /nodes=1/core=8,walltime=00:02:00" in job_script
assert "#OAR --project" not in job_script
assert "#OAR -q" not in job_script
@@ -51,7 +54,8 @@ def test_job_script():
"{} -m distributed.cli.dask_worker tcp://".format(sys.executable)
in job_script
)
assert "--nthreads 2 --nprocs 4 --memory-limit 7.00GB" in job_script
formatted_bytes = format_bytes(parse_bytes("7GB")).replace(" ", "")
assert f"--nthreads 2 --nprocs 4 --memory-limit {formatted_bytes}" in job_script

with OARCluster(
walltime="00:02:00",
@@ -67,7 +71,8 @@ def test_job_script():
job_script = cluster.job_script()
assert "#OAR" in job_script
assert "#OAR -n dask-worker" in job_script
assert "--memory-limit 7.00GB " in job_script
formatted_bytes = format_bytes(parse_bytes("7GB")).replace(" ", "")
assert f"--memory-limit {formatted_bytes}" in job_script
assert "#OAR -l /nodes=1/core=8,walltime=00:02:00" in job_script
assert "#OAR --project" not in job_script
assert "#OAR -q" not in job_script
@@ -80,7 +85,8 @@ def test_job_script():
"{} -m distributed.cli.dask_worker tcp://".format(sys.executable)
in job_script
)
assert "--nthreads 2 --nprocs 4 --memory-limit 7.00GB" in job_script
formatted_bytes = format_bytes(parse_bytes("7GB")).replace(" ", "")
assert f"--nthreads 2 --nprocs 4 --memory-limit {formatted_bytes}" in job_script


def test_config_name_oar_takes_custom_config():
11 changes: 8 additions & 3 deletions dask_jobqueue/tests/test_pbs.py
@@ -1,8 +1,11 @@
import sys
from time import sleep, time

import dask
import pytest

import dask
from dask.utils import format_bytes, parse_bytes

from dask.distributed import Client

from dask_jobqueue import MoabCluster, PBSCluster
@@ -72,7 +75,8 @@ def test_job_script(Cluster):
"{} -m distributed.cli.dask_worker tcp://".format(sys.executable)
in job_script
)
assert "--nthreads 2 --nprocs 4 --memory-limit 7.00GB" in job_script
formatted_bytes = format_bytes(parse_bytes("7GB")).replace(" ", "")
assert f"--nthreads 2 --nprocs 4 --memory-limit {formatted_bytes}" in job_script

with Cluster(
queue="regular",
@@ -95,7 +99,8 @@ def test_job_script(Cluster):
"{} -m distributed.cli.dask_worker tcp://".format(sys.executable)
in job_script
)
assert "--nthreads 2 --nprocs 4 --memory-limit 7.00GB" in job_script
formatted_bytes = format_bytes(parse_bytes("7GB")).replace(" ", "")
assert f"--nthreads 2 --nprocs 4 --memory-limit {formatted_bytes}" in job_script


@pytest.mark.env("pbs")
5 changes: 4 additions & 1 deletion dask_jobqueue/tests/test_sge.py
@@ -5,6 +5,7 @@

from dask_jobqueue import SGECluster
import dask
from dask.utils import format_bytes, parse_bytes

from . import QUEUE_WAIT

@@ -83,10 +84,12 @@ def test_job_script(tmpdir):
resource_spec="h_vmem=12G,mem_req=12G",
) as cluster:
job_script = cluster.job_script()
formatted_bytes = format_bytes(parse_bytes("6GB")).replace(" ", "")

for each in [
"--nprocs 2",
"--nthreads 3",
"--memory-limit 6.00GB",
f"--memory-limit {formatted_bytes}",
"-q my-queue",
"-P my-project",
"-l h_rt=02:00:00",
10 changes: 7 additions & 3 deletions dask_jobqueue/tests/test_slurm.py
@@ -5,6 +5,7 @@
from distributed import Client

import dask
from dask.utils import format_bytes, parse_bytes

from dask_jobqueue import SLURMCluster

@@ -60,7 +61,8 @@ def test_job_script():
job_script = cluster.job_script()
assert "#SBATCH" in job_script
assert "#SBATCH -J dask-worker" in job_script
assert "--memory-limit 7.00GB " in job_script
formatted_bytes = format_bytes(parse_bytes("7GB")).replace(" ", "")
assert f"--memory-limit {formatted_bytes}" in job_script
assert "#SBATCH -n 1" in job_script
assert "#SBATCH --cpus-per-task=8" in job_script
assert "#SBATCH --mem=27G" in job_script
@@ -74,7 +76,8 @@ def test_job_script():
"{} -m distributed.cli.dask_worker tcp://".format(sys.executable)
in job_script
)
assert "--nthreads 2 --nprocs 4 --memory-limit 7.00GB" in job_script
formatted_bytes = format_bytes(parse_bytes("7GB")).replace(" ", "")
assert f"--nthreads 2 --nprocs 4 --memory-limit {formatted_bytes}" in job_script

with SLURMCluster(
walltime="00:02:00",
@@ -105,7 +108,8 @@ def test_job_script():
"{} -m distributed.cli.dask_worker tcp://".format(sys.executable)
in job_script
)
assert "--nthreads 2 --nprocs 4 --memory-limit 7.00GB" in job_script
formatted_bytes = format_bytes(parse_bytes("7GB")).replace(" ", "")
assert f"--nthreads 2 --nprocs 4 --memory-limit {formatted_bytes}" in job_script


@pytest.mark.env("slurm")