[pre-commit.ci] pre-commit autoupdate (#359)
<!--pre-commit.ci start-->
updates:
- [github.com/astral-sh/ruff-pre-commit: v0.0.282 → v0.0.284](astral-sh/ruff-pre-commit@v0.0.282...v0.0.284)
<!--pre-commit.ci end-->

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Jinzhe Zeng <jinzhe.zeng@rutgers.edu>
pre-commit-ci[bot] and njzjz committed Aug 15, 2023
1 parent eabede4 commit 22f49df
Showing 8 changed files with 32 additions and 48 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
```diff
@@ -22,7 +22,7 @@ repos:
   - id: black-jupyter
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.0.282
+  rev: v0.0.284
   hooks:
   - id: ruff
     args: ["--fix"]
```
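The code changes in the rest of this commit appear consistent with what ruff's `--fix` pass produces after this version bump; for instance, rule UP032 ("Use f-string instead of `format` call") rewrites `str.format` calls into f-strings. A minimal sketch of that rewrite, with illustrative values only:

```python
# Illustrative values; the message shape mirrors the ones used in this repo.
uri = "/user/test/file"
cmd = f"hadoop fs -test -e {uri}"

# Before: positional str.format placeholders.
msg_old = "Cannot check existence of hdfs uri[{}] with cmd[{}]".format(uri, cmd)

# After: the equivalent f-string, as applied throughout this commit.
msg_new = f"Cannot check existence of hdfs uri[{uri}] with cmd[{cmd}]"

assert msg_old == msg_new
```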
48 changes: 19 additions & 29 deletions dpdispatcher/hdfs_cli.py
```diff
@@ -23,15 +23,13 @@ def exists(uri):
                 return False
             else:
                 raise RuntimeError(
-                    "Cannot check existence of hdfs uri[{}] "
-                    "with cmd[{}]; ret[{}] stdout[{}] stderr[{}]".format(
-                        uri, cmd, ret, out, err
-                    )
+                    f"Cannot check existence of hdfs uri[{uri}] "
+                    f"with cmd[{cmd}]; ret[{ret}] stdout[{out}] stderr[{err}]"
                 )
         except Exception as e:
             raise RuntimeError(
-                "Cannot check existence of hdfs uri[{}] "
-                "with cmd[{}]".format(uri, cmd)
+                f"Cannot check existence of hdfs uri[{uri}] "
+                f"with cmd[{cmd}]"
             ) from e
 
     @staticmethod
@@ -47,14 +45,12 @@ def remove(uri):
                 return True
             else:
                 raise RuntimeError(
-                    "Cannot remove hdfs uri[{}] "
-                    "with cmd[{}]; ret[{}] output[{}] stderr[{}]".format(
-                        uri, cmd, ret, out, err
-                    )
+                    f"Cannot remove hdfs uri[{uri}] "
+                    f"with cmd[{cmd}]; ret[{ret}] output[{out}] stderr[{err}]"
                 )
         except Exception as e:
             raise RuntimeError(
-                "Cannot remove hdfs uri[{}] " "with cmd[{}]".format(uri, cmd)
+                f"Cannot remove hdfs uri[{uri}] " f"with cmd[{cmd}]"
             ) from e
 
     @staticmethod
@@ -70,14 +66,12 @@ def mkdir(uri):
                 return True
             else:
                 raise RuntimeError(
-                    "Cannot mkdir of hdfs uri[{}] "
-                    "with cmd[{}]; ret[{}] output[{}] stderr[{}]".format(
-                        uri, cmd, ret, out, err
-                    )
+                    f"Cannot mkdir of hdfs uri[{uri}] "
+                    f"with cmd[{cmd}]; ret[{ret}] output[{out}] stderr[{err}]"
                 )
         except Exception as e:
             raise RuntimeError(
-                "Cannot mkdir of hdfs uri[{}] " "with cmd[{}]".format(uri, cmd)
+                f"Cannot mkdir of hdfs uri[{uri}] " f"with cmd[{cmd}]"
             ) from e
 
     @staticmethod
@@ -88,7 +82,7 @@ def copy_from_local(local_path, to_uri):
         # Make sure local_path is accessible
         if not os.path.exists(local_path) or not os.access(local_path, os.R_OK):
             raise RuntimeError(
-                "try to access local_path[{}] " "but failed".format(local_path)
+                f"try to access local_path[{local_path}] " "but failed"
             )
         cmd = f"hadoop fs -copyFromLocal -f {local_path} {to_uri}"
         try:
@@ -141,14 +135,12 @@ def read_hdfs_file(uri):
                 return out
             else:
                 raise RuntimeError(
-                    "Cannot read text from uri[{}]"
-                    "cmd [{}] ret[{}] output[{}] stderr[{}]".format(
-                        uri, cmd, ret, out, err
-                    )
+                    f"Cannot read text from uri[{uri}]"
+                    f"cmd [{cmd}] ret[{ret}] output[{out}] stderr[{err}]"
                 )
         except Exception as e:
             raise RuntimeError(
-                "Cannot read text from uri[{}]" "cmd [{}]".format(uri, cmd)
+                f"Cannot read text from uri[{uri}]" f"cmd [{cmd}]"
             ) from e
 
     @staticmethod
@@ -160,14 +152,12 @@ def move(from_uri, to_uri):
                 return True
             else:
                 raise RuntimeError(
-                    "Cannot move from_uri[{}] to "
-                    "to_uri[{}] with cmd[{}]; "
-                    "ret[{}] output[{}] stderr[{}]".format(
-                        from_uri, to_uri, cmd, ret, out, err
-                    )
+                    f"Cannot move from_uri[{from_uri}] to "
+                    f"to_uri[{to_uri}] with cmd[{cmd}]; "
+                    f"ret[{ret}] output[{out}] stderr[{err}]"
                 )
         except Exception as e:
             raise RuntimeError(
-                "Cannot move from_uri[{}] to "
-                "to_uri[{}] with cmd[{}]".format(from_uri, to_uri, cmd)
+                f"Cannot move from_uri[{from_uri}] to "
+                f"to_uri[{to_uri}] with cmd[{cmd}]"
             ) from e
```
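Beyond the f-string conversion, these handlers keep the `raise ... from e` chaining, which preserves the original traceback when a low-level failure is re-raised as a `RuntimeError`. A self-contained sketch of the pattern; `run_cmd` is a stand-in helper and the exact `hadoop fs` command is assumed, not taken from this diff:

```python
import subprocess

def run_cmd(cmd: str):
    """Stand-in helper: run a shell command, return (returncode, stdout, stderr)."""
    proc = subprocess.run(cmd, shell=True, capture_output=True, text=True)
    return proc.returncode, proc.stdout, proc.stderr

def exists(uri: str) -> bool:
    cmd = f"hadoop fs -test -e {uri}"  # assumed command shape
    try:
        ret, out, err = run_cmd(cmd)
    except Exception as e:
        # "from e" chains the original exception into the traceback.
        raise RuntimeError(
            f"Cannot check existence of hdfs uri[{uri}] with cmd[{cmd}]"
        ) from e
    if ret == 0:
        return True
    if ret == 1:
        return False
    raise RuntimeError(
        f"Cannot check existence of hdfs uri[{uri}] "
        f"with cmd[{cmd}]; ret[{ret}] stdout[{out}] stderr[{err}]"
    )
```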
2 changes: 1 addition & 1 deletion dpdispatcher/hdfs_context.py
```diff
@@ -17,7 +17,7 @@ def __init__(
         *args,
         **kwargs,
     ):
-        assert type(local_root) == str
+        assert isinstance(local_root, str)
         self.init_local_root = local_root
         self.init_remote_root = remote_root
         self.temp_local_root = os.path.abspath(local_root)
```
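The same one-line change recurs in lazy_local_context.py, local_context.py, and ssh_context.py below. `isinstance` is the preferred check (the `type(...) ==` comparison is flagged by pycodestyle's E721, which ruff also reports) because it accepts subclasses and reads as an explicit type test. A quick illustration, with a purely hypothetical `str` subclass:

```python
class HdfsPath(str):
    """Hypothetical subclass, only to show the difference."""

local_root = HdfsPath("/tmp/dpdispatcher")

assert isinstance(local_root, str)  # passes: subclasses count
assert type(local_root) != str      # exact-type comparison rejects the subclass
```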
2 changes: 1 addition & 1 deletion dpdispatcher/lazy_local_context.py
```diff
@@ -44,7 +44,7 @@ def __init__(
         *args,
         **kwargs,
     ):
-        assert type(local_root) == str
+        assert isinstance(local_root, str)
         self.init_local_root = local_root
         self.init_remote_root = remote_root
         self.temp_local_root = os.path.abspath(local_root)
```
2 changes: 1 addition & 1 deletion dpdispatcher/local_context.py
```diff
@@ -63,7 +63,7 @@ def __init__(
         *args,
         **kwargs,
     ):
-        assert type(local_root) == str
+        assert isinstance(local_root, str)
         self.init_local_root = local_root
         self.init_remote_root = remote_root
         self.temp_local_root = os.path.abspath(local_root)
```
16 changes: 5 additions & 11 deletions dpdispatcher/lsf.py
```diff
@@ -43,24 +43,18 @@ def gen_script_header(self, job):
             if gpu_new_syntax_flag is True:
                 if gpu_exclusive_flag is True:
                     script_header_dict["lsf_number_gpu_line"] = (
-                        "#BSUB -gpu 'num={gpu_per_node}:mode=shared:"
-                        "j_exclusive=yes'".format(
-                            gpu_per_node=resources.gpu_per_node
-                        )
+                        f"#BSUB -gpu 'num={resources.gpu_per_node}:mode=shared:"
+                        "j_exclusive=yes'"
                     )
                 else:
                     script_header_dict["lsf_number_gpu_line"] = (
-                        "#BSUB -gpu 'num={gpu_per_node}:mode=shared:"
-                        "j_exclusive=no'".format(
-                            gpu_per_node=resources.gpu_per_node
-                        )
+                        f"#BSUB -gpu 'num={resources.gpu_per_node}:mode=shared:"
+                        "j_exclusive=no'"
                     )
             else:
                 script_header_dict["lsf_number_gpu_line"] = (
                     '#BSUB -R "select[ngpus >0] rusage['
-                    'ngpus_excl_p={gpu_per_node}]"'.format(
-                        gpu_per_node=resources.gpu_per_node
-                    )
+                    f'ngpus_excl_p={resources.gpu_per_node}]"'
                 )
         else:
             script_header_dict["lsf_number_gpu_line"] = ""
```
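For reference, a small sketch of the header lines the three branches above produce, using an assumed `gpu_per_node = 4`. Note how adjacent string literals let the f-string nest single quotes inside the `#BSUB -gpu` line and double quotes inside the `#BSUB -R` line:

```python
gpu_per_node = 4  # assumed value for illustration

exclusive = f"#BSUB -gpu 'num={gpu_per_node}:mode=shared:" "j_exclusive=yes'"
shared = f"#BSUB -gpu 'num={gpu_per_node}:mode=shared:" "j_exclusive=no'"
old_syntax = '#BSUB -R "select[ngpus >0] rusage[' f'ngpus_excl_p={gpu_per_node}]"'

print(exclusive)   # #BSUB -gpu 'num=4:mode=shared:j_exclusive=yes'
print(old_syntax)  # #BSUB -R "select[ngpus >0] rusage[ngpus_excl_p=4]"
```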
2 changes: 1 addition & 1 deletion dpdispatcher/ssh_context.py
```diff
@@ -408,7 +408,7 @@ def __init__(
         *args,
         **kwargs,
     ):
-        assert type(local_root) == str
+        assert isinstance(local_root, str)
         self.init_local_root = local_root
         self.init_remote_root = remote_root
         self.temp_local_root = os.path.abspath(local_root)
```
6 changes: 3 additions & 3 deletions dpdispatcher/submission.py
```diff
@@ -167,15 +167,15 @@ def register_task(self, task):
         if self.belonging_jobs:
             raise RuntimeError(
                 "Not allowed to register tasks after generating jobs. "
-                "submission hash error {self}".format(self=self)
+                f"submission hash error {self}"
             )
         self.belonging_tasks.append(task)
 
     def register_task_list(self, task_list):
         if self.belonging_jobs:
             raise RuntimeError(
                 "Not allowed to register tasks after generating jobs. "
-                "submission hash error {self}".format(self=self)
+                f"submission hash error {self}"
             )
         self.belonging_tasks.extend(task_list)
 
@@ -464,7 +464,7 @@ def generate_jobs(self):
                 f"Can not generate jobs when submission.belonging_jobs is not empty. debug:{self}"
             )
         group_size = self.resources.group_size
-        if (group_size < 0) or (type(group_size) is not int):
+        if (group_size < 0) or (not isinstance(group_size, int)):
             raise RuntimeError("group_size must be a positive number")
         task_num = len(self.belonging_tasks)
         if task_num == 0:
```
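A short sketch of the `group_size` guard above. It mirrors the committed logic exactly: the numeric comparison runs before the `isinstance` check, and `group_size == 0` still passes even though the message says "positive" — both carried over from the existing behavior rather than changed by this commit:

```python
def check_group_size(group_size):
    # Mirrors the committed guard; the comparison happens before the type check.
    if (group_size < 0) or (not isinstance(group_size, int)):
        raise RuntimeError("group_size must be a positive number")

check_group_size(4)   # ok
check_group_size(0)   # also passes, despite the message
try:
    check_group_size(-1)
except RuntimeError as err:
    print(err)        # group_size must be a positive number
```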
