Skip to content

Commit

Permalink
Blacken after what is likely a black version change.
Browse files Browse the repository at this point in the history
  • Loading branch information
lesteve committed Sep 29, 2020
1 parent a4a218a commit 0e03349
Show file tree
Hide file tree
Showing 6 changed files with 10 additions and 12 deletions.
6 changes: 3 additions & 3 deletions dask_jobqueue/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -362,7 +362,7 @@ def _close_job(cls, job_id):

@staticmethod
def _call(cmd, **kwargs):
""" Call a command using subprocess.Popen.
"""Call a command using subprocess.Popen.
This centralizes calls out to the command line, providing consistent
outputs, logging, and an opportunity to go asynchronous in the future.
Expand Down Expand Up @@ -590,7 +590,7 @@ def job_name(self):
return self._dummy_job.job_name

def scale(self, n=None, jobs=0, memory=None, cores=None):
""" Scale cluster to specified configurations.
"""Scale cluster to specified configurations.
Parameters
----------
Expand All @@ -612,7 +612,7 @@ def scale(self, n=None, jobs=0, memory=None, cores=None):
def adapt(
self, *args, minimum_jobs: int = None, maximum_jobs: int = None, **kwargs
):
""" Scale Dask cluster automatically based on scheduler activity.
"""Scale Dask cluster automatically based on scheduler activity.
Parameters
----------
Expand Down
4 changes: 2 additions & 2 deletions dask_jobqueue/htcondor.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,8 +95,8 @@ def __init__(
self.job_header_dict.update(self.job_extra)

def env_lines_to_dict(self, env_lines):
""" Convert an array of export statements (what we get from env-extra
in the config) into a dict """
"""Convert an array of export statements (what we get from env-extra
in the config) into a dict"""
env_dict = {}
for env_line in env_lines:
split_env_line = shlex.split(env_line)
Expand Down
6 changes: 2 additions & 4 deletions dask_jobqueue/lsf.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ async def _submit_job(self, script_filename):


def lsf_format_bytes_ceil(n, lsf_units="mb"):
""" Format bytes as text
"""Format bytes as text
Convert bytes to megabytes which LSF requires.
Expand All @@ -135,9 +135,7 @@ def lsf_format_bytes_ceil(n, lsf_units="mb"):


def lsf_detect_units():
""" Try to autodetect the unit scaling on an LSF system
"""
"""Try to autodetect the unit scaling on an LSF system"""
    # Search automatically, using docs from LSF 9.1.3 for search/defaults
unit = "kb" # Default fallback unit
try:
Expand Down
2 changes: 1 addition & 1 deletion dask_jobqueue/pbs.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@


def pbs_format_bytes_ceil(n):
""" Format bytes as text.
"""Format bytes as text.
    PBS expects KiB, MiB or GiB, but names it KB, MB, GB whereas Dask makes the difference between KB and KiB.
Expand Down
2 changes: 1 addition & 1 deletion dask_jobqueue/slurm.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ def __init__(


def slurm_format_bytes_ceil(n):
""" Format bytes as text.
"""Format bytes as text.
    SLURM expects KiB, MiB or GiB, but names it KB, MB, GB. SLURM does not handle Bytes, only starts at KB.
Expand Down
2 changes: 1 addition & 1 deletion dask_jobqueue/tests/test_jobqueue_core.py
Original file line number Diff line number Diff line change
Expand Up @@ -408,7 +408,7 @@ def test_wrong_parameter_error(Cluster):
)
with pytest.raises(ValueError, match=match):
create_cluster_func(
Cluster, cores=1, memory="1GB", wrong_parameter="wrong_parameter_value",
Cluster, cores=1, memory="1GB", wrong_parameter="wrong_parameter_value"
)


Expand Down

0 comments on commit 0e03349

Please sign in to comment.