Merge pull request #66 from spencerkclark/moab
Add MoabCluster
guillaumeeb committed Jun 6, 2018
2 parents e0c2484 + a0cefa8 commit d8d1dbc
Showing 4 changed files with 68 additions and 0 deletions.
1 change: 1 addition & 0 deletions dask_jobqueue/__init__.py
@@ -1,6 +1,7 @@
# flake8: noqa
from . import config
from .core import JobQueueCluster
from .moab import MoabCluster
from .pbs import PBSCluster
from .slurm import SLURMCluster
from .sge import SGECluster
47 changes: 47 additions & 0 deletions dask_jobqueue/moab.py
@@ -0,0 +1,47 @@
from .core import docstrings
from .pbs import PBSCluster


class MoabCluster(PBSCluster):
    __doc__ = docstrings.with_indents("""Launch Dask on a Moab cluster

    Parameters
    ----------
    queue : str
        Destination queue for each worker job. Passed to `#PBS -q` option.
    project : str
        Accounting string associated with each worker job. Passed to
        `#PBS -A` option.
    resource_spec : str
        Request resources and specify job placement. Passed to `#PBS -l`
        option.
    walltime : str
        Walltime for each worker job.
    job_extra : list
        List of other PBS options, for example -j oe. Each option will be
        prepended with the #PBS prefix.
    %(JobQueueCluster.parameters)s

    Examples
    --------
    >>> import os
    >>> from dask_jobqueue import MoabCluster
    >>> cluster = MoabCluster(processes=6, threads=1, project='gfdl_m',
    ...                       memory='16G', resource_spec='96G',
    ...                       job_extra=['-d /home/First.Last', '-M none'],
    ...                       local_directory=os.getenv('TMPDIR', '/tmp'))
    >>> cluster.start_workers(10)  # this may take a few seconds to launch

    >>> from dask.distributed import Client
    >>> client = Client(cluster)

    This also works with adaptive clusters. This automatically launches and
    kills workers based on load.

    >>> cluster.adapt()
    """, 4)
    submit_command = 'msub'
    cancel_command = 'canceljob'

    def _job_id_from_submit_output(self, out):
        return out.strip()
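
Taken together, the subclass reuses all of the PBS job-script machinery and only swaps the scheduler interaction: jobs are submitted with ``msub``, cancelled with ``canceljob``, and the job id is recovered by stripping whitespace from ``msub``'s output. As a rough end-to-end sketch of how the new class is meant to be used (assuming a Moab system with ``msub`` on the ``PATH``; the project, memory, and resource values are the placeholders from the docstring example, and the ``dask.array`` workload is only for illustration):

import os

import dask.array as da
from dask.distributed import Client
from dask_jobqueue import MoabCluster

# Build the cluster object; no jobs are submitted yet.
cluster = MoabCluster(processes=6, threads=1, project='gfdl_m',
                      memory='16G', resource_spec='96G',
                      local_directory=os.getenv('TMPDIR', '/tmp'))

cluster.start_workers(10)  # submit worker jobs through msub

client = Client(cluster)   # connect a client to the cluster's scheduler

# Any ordinary dask workload now runs on the Moab-managed workers.
x = da.random.random((10000, 10000), chunks=(1000, 1000))
print(x.mean().compute())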
1 change: 1 addition & 0 deletions docs/api.rst
@@ -9,6 +9,7 @@ API
.. autosummary::
   :toctree: generated/

   MoabCluster
   PBSCluster
   SLURMCluster
   SGECluster
19 changes: 19 additions & 0 deletions docs/examples.rst
@@ -30,6 +30,25 @@ PBS Deployments
walltime='02:00:00',
interface='ib0')

Moab Deployments
~~~~~~~~~~~~~~~~

On systems that use the Moab Workload Manager, use ``MoabCluster``, a
subclass of ``PBSCluster``:

.. code-block:: python

   import os
   from dask_jobqueue import MoabCluster

   cluster = MoabCluster(processes=6,
                         threads=1,
                         project='gfdl_m',
                         memory='16G',
                         resource_spec='pmem=96G',
                         job_extra=['-d /home/First.Last', '-M none'],
                         local_directory=os.getenv('TMPDIR', '/tmp'))
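
The snippet above only constructs the cluster object. A minimal sketch of what typically follows, mirroring the adaptive example in the ``MoabCluster`` docstring (nothing here is Moab-specific beyond the cluster class):

.. code-block:: python

   from dask.distributed import Client

   client = Client(cluster)   # connect to the scheduler started by MoabCluster

   # Either request a fixed number of worker jobs...
   cluster.start_workers(10)

   # ...or let the cluster launch and kill worker jobs based on load.
   cluster.adapt()
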
SGE Deployments
---------------
