chore: add pre-commit check for python (#5392)
hamidzr committed Dec 1, 2022
1 parent 1c06774 commit 6b88d71
Showing 19 changed files with 132 additions and 165 deletions.
21 changes: 10 additions & 11 deletions .circleci/scripts/upload_test_results.py
@@ -12,13 +12,12 @@ def upload_results(status, parallel_run, directory, job_id, access_key, url) ->
# Flush write data and reset pointer
temp_archive.flush()
temp_archive.seek(0)
-response = requests.post(url,
-                         headers={"x-api-key": access_key},
-                         files={"report": temp_archive},
-                         data={"status": status,
-                               "parallel_run": parallel_run,
-                               "job_id": job_id}
-                         )
+response = requests.post(
+    url,
+    headers={"x-api-key": access_key},
+    files={"report": temp_archive},
+    data={"status": status, "parallel_run": parallel_run, "job_id": job_id},
+)
print(response)


@@ -31,13 +30,13 @@ def main() -> None:
parser.add_argument("access_key", help="Determined CI API key")
parser.add_argument("url", help="Determined CI server URL")
args = parser.parse_args()
-upload_results(args.status, args.parallel_run,
-               args.filepath, args.job_id,
-               args.access_key, args.url)
+upload_results(
+    args.status, args.parallel_run, args.filepath, args.job_id, args.access_key, args.url
+)


if __name__ == "__main__":
try:
main()
except ConnectionError as err:
-print(f"Error connecting to CI server {err}")
\ No newline at end of file
+print(f"Error connecting to CI server {err}")
2 changes: 1 addition & 1 deletion .github/workflows/pre-commit.yml
@@ -14,7 +14,7 @@ jobs:
env:
# these checks currently require a ridiculous number of dependencies :)
# consider repackaging them in a container in the future
SKIP: "web-js-lint-check,web-css-lint-check,web-misc-lint-check"
SKIP: "web-js-lint-check,web-css-lint-check,web-misc-lint-check,black"
steps:
- uses: actions/checkout@v3.0.0
with:
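Note: the SKIP variable above uses pre-commit's built-in skip mechanism, which reads a comma-separated list of hook ids from the environment and skips those hooks, so adding "black" keeps the new hook from running in this workflow. A minimal sketch of an equivalent invocation from Python, assuming the pre-commit CLI is installed and on PATH:

import os
import subprocess

# Mirror the CI job's environment: pre-commit skips any hook whose id appears
# in the comma-separated SKIP variable.
env = {**os.environ, "SKIP": "web-js-lint-check,web-css-lint-check,web-misc-lint-check,black"}

# Run the remaining configured hooks against every file in the repository.
subprocess.run(["pre-commit", "run", "--all-files"], env=env, check=True)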
7 changes: 7 additions & 0 deletions .pre-commit-config.yaml
@@ -37,3 +37,10 @@ repos:
hooks:
- id: yamllint
files: '^.github/'
+- repo: https://github.com/psf/black
+  rev: '20.8b1'
+  hooks:
+    - id: black
+      # known issue fixed in a newer version: https://github.com/psf/black/issues/2964#issuecomment-1080974737
+      additional_dependencies: ['click==8.0.4']
+      exclude: ^(examples|harness/determined/common/api/bindings.py)
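With this entry in place, a contributor can exercise the new hook locally. A minimal sketch, again assuming the pre-commit CLI is installed; the hook id "black" comes from the config entry above:

import subprocess

# One-time setup: install pre-commit's git hook into .git/hooks/pre-commit so
# the configured checks run automatically on each commit.
subprocess.run(["pre-commit", "install"], check=True)

# Run only the new black hook, against all files rather than just staged ones.
subprocess.run(["pre-commit", "run", "black", "--all-files"], check=True)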
22 changes: 9 additions & 13 deletions bindings/generate_bindings.py
@@ -343,7 +343,7 @@ def gen_def(self) -> Code:
out += [f' kwargs["{k}"] = {parsed}']
out += [" return cls(**kwargs)"]
out += [""]
-out += [' def to_json(self, omit_unset: bool = False) -> typing.Dict[str, typing.Any]:']
+out += [" def to_json(self, omit_unset: bool = False) -> typing.Dict[str, typing.Any]:"]
out += [' out: "typing.Dict[str, typing.Any]" = {']
for k in required:
v = self.params[k]
@@ -501,8 +501,8 @@ def gen_def(self) -> Code:
f' "{self.method}_{self.name}",',
f' runtimeStreamError.from_json(_j["error"])',
f" )",
-f' yield {yieldable}',
-f' return',
+f" yield {yieldable}",
+f" return",
]
out += [f' raise APIHttpError("{self.method}_{self.name}", _resp)']

@@ -565,7 +565,9 @@ def process_enums(swagger_definitions: dict) -> typing.Dict[int, str]:
members = schema["enum"]
if enums.get(json.dumps(members)) is not None:
print(
"ambiguous enum parameter:", name, members,
"ambiguous enum parameter:",
name,
members,
file=sys.stderr,
)
enums[json.dumps(members)] = name
@@ -696,9 +698,7 @@ def process_paths(swagger_paths: dict, enums: dict) -> typing.Dict[str, Function
inlined = ("type", "format", "items", "properties", "enum")
pschema = {k: pspec[k] for k in inlined if k in pspec}
ptype = classify_type(enums, f"{name}.{pname}", pschema)
-params[pname] = Parameter(
-    pname, ptype, required, where, serialized_name
-)
+params[pname] = Parameter(pname, ptype, required, where, serialized_name)

assert is_expected_path(path), (path, name)
path = path.replace(".", "_")
@@ -721,9 +721,7 @@ def gen_paginated(defs: TypeDefs) -> typing.List[str]:
continue
# Note that our goal is to mimic duck typing, so we only care if the "pagination" attribute
# exists with a v1Pagination type.
-if any(
-    n == "pagination" and p.type.name == "v1Pagination" for n, p in defn.params.items()
-):
+if any(n == "pagination" and p.type.name == "v1Pagination" for n, p in defn.params.items()):
paginated.append(defn.name)

if not paginated:
@@ -826,9 +824,7 @@ def __str__(self) -> str:
import argparse

parser = argparse.ArgumentParser()
-parser.add_argument(
-    "--output", "-o", action="store", required=True, help="output file"
-)
+parser.add_argument("--output", "-o", action="store", required=True, help="output file")
args = parser.parse_args()

with open(SWAGGER) as f:
7 changes: 5 additions & 2 deletions docs/conf.py
@@ -28,11 +28,14 @@
"styles/determined.css",
]

+
def env_get_outdated(app, env, added, changed, removed):
-    return ['index']
+    return ["index"]

+
def setup(app):
-    app.connect('env-get-outdated', env_get_outdated)
+    app.connect("env-get-outdated", env_get_outdated)

+
exclude_patterns = [
"_build",
4 changes: 1 addition & 3 deletions docs/sphinx-wrapper.py
@@ -35,9 +35,7 @@ def main(sphinx_build_args: List[str]) -> int:
if len(download_link_errors) == 0:
return 0

-sys.stderr.write(
-    "Download links are broken!! ':download:`<path> links`'\n" "Full paths:\n"
-)
+sys.stderr.write("Download links are broken!! ':download:`<path> links`'\n" "Full paths:\n")
for path in download_link_errors:
sys.stderr.write(f"{path}\n")

4 changes: 1 addition & 3 deletions docs/sphinx_ext_downloads.py
@@ -44,9 +44,7 @@ def process_doc(
if not os.access(filename, os.R_OK):
logger.warning(__("download file not readable: %s") % filename)
continue
node["filename"] = app.env.dlfiles.add_file(
app.env.docname, rel_filename
)
node["filename"] = app.env.dlfiles.add_file(app.env.docname, rel_filename)


def setup(app: application.Sphinx) -> Dict[str, Any]:
2 changes: 1 addition & 1 deletion master/static/srv/check_idle.py
@@ -34,7 +34,7 @@ def wait_for_jupyter(addr):
# Connection worked, we're done here.
return
except ConnectionError as e:
-if (i+1) % 10 == 0:
+if (i + 1) % 10 == 0:
# Every 10 seconds without reaching jupyter, start telling the user.
# This is beyond the range of expected startup times.
logging.warning(f"jupyter is still not reachable at {addr}")
10 changes: 3 additions & 7 deletions master/static/srv/check_ready_logs.py
@@ -46,19 +46,15 @@ def main(ready: Pattern, waiting: Optional[Pattern] = None):


if __name__ == "__main__":
-parser = argparse.ArgumentParser(
-    description="Read STDIN for a match and mark a task as ready"
-)
+parser = argparse.ArgumentParser(description="Read STDIN for a match and mark a task as ready")
parser.add_argument(
"--ready-regex", type=str, help="the pattern to match task ready", required=True
)
-parser.add_argument(
-    "--waiting-regex", type=str, help="the pattern to match task waiting"
-)
+parser.add_argument("--waiting-regex", type=str, help="the pattern to match task waiting")
args = parser.parse_args()

ready_regex = re.compile(args.ready_regex)
-if args.waiting_regex:
+if args.waiting_regex:
waiting_regrex = re.compile(args.waiting_regex)
main(ready_regex, waiting_regrex)
else:
14 changes: 4 additions & 10 deletions master/static/srv/enrich_task_logs.py
@@ -18,9 +18,7 @@
# 2022-05-12 16:32:48,757:gc_checkpoints: [rank=0] INFO: Determined checkpoint GC, version 0.17.16-dev0
# Below regex is used to extract the rank field from the log message.
# Excluding empty spaces this regex matches rank in the above example as [rank=0]
-rank = re.compile(
-    "(?P<space1> ?)\[rank=(?P<rank_id>([0-9]+))\](?P<space2> ?)(?P<log>.*)"
-)
+rank = re.compile("(?P<space1> ?)\[rank=(?P<rank_id>([0-9]+))\](?P<space2> ?)(?P<log>.*)")
# Below regex is used to extract the message severity from the log message.
# Excluding empty spaces and delimiter(:) this regex matches message severity level in the above example as INFO
level = re.compile(
@@ -59,7 +57,7 @@ def __init__(
def run(self) -> None:
try:
for line in sys.stdin:
-print(line, flush=True, end='')
+print(line, flush=True, end="")
try:
parsed_metadata = {}

@@ -75,9 +73,7 @@ def run(self) -> None:

self.ship_queue.put(
{
"timestamp": datetime.datetime.now(
datetime.timezone.utc
).isoformat(),
"timestamp": datetime.datetime.now(datetime.timezone.utc).isoformat(),
"log": line,
**self.task_logging_metadata,
**parsed_metadata,
@@ -165,9 +161,7 @@ def main(
parser = argparse.ArgumentParser(
description="read a stream and enrich it with the standard logging metadata"
)
-parser.add_argument(
-    "--stdtype", type=str, help="the stdtype of this stream", required=True
-)
+parser.add_argument("--stdtype", type=str, help="the stdtype of this stream", required=True)
args = parser.parse_args()

master_url = os.environ.get("DET_MASTER", os.environ.get("DET_MASTER_ADDR"))
8 changes: 2 additions & 6 deletions pre-commit/web_lint_check.py
@@ -8,17 +8,13 @@

def web_lint_check():
parser = argparse.ArgumentParser(description="Lint Check for Web")
-parser.add_argument(
-    "target", help="Either js, css, or misc", choices=["js", "css", "misc"]
-)
+parser.add_argument("target", help="Either js, css, or misc", choices=["js", "css", "misc"])
parser.add_argument("file_paths", help="1 or more file paths", nargs="+")
args = parser.parse_args()
DIR = "webui/react/"

target: str = args.target
-file_paths: str = " ".join(
-    [os.path.relpath(file_path, DIR) for file_path in args.file_paths]
-)
+file_paths: str = " ".join([os.path.relpath(file_path, DIR) for file_path in args.file_paths])
nproc: int = multiprocessing.cpu_count()
run_command: list[str] = [
"make",
9 changes: 5 additions & 4 deletions proto/scripts/swagger.py
@@ -11,6 +11,7 @@

SERVICE_NAME = "Determined"

+
def merge_dict(d1: Dict, d2: Dict) -> None:
"""
Modifies d1 in-place to contain values from d2. If any value
@@ -31,9 +32,10 @@ def capitalize(s: str) -> str:
return s.title()
return s[0].upper() + s[1:]

+
def to_lower_camel_case(snake_str):
-    components = snake_str.split('_')
-    return components[0] + ''.join(x.title() for x in components[1:])
+    components = snake_str.split("_")
+    return components[0] + "".join(x.title() for x in components[1:])


def clean(path: str, patch: str) -> None:
@@ -60,8 +62,7 @@ def clean(path: str, patch: str) -> None:
for method, api in value.items():
cur_id = str(api["operationId"])
if cur_id.startswith(operationid_prefix):
spec["paths"][url][method]["operationId"] = cur_id[len(operationid_prefix):]

spec["paths"][url][method]["operationId"] = cur_id[len(operationid_prefix) :]

with open(patch, "r") as f:
merge_dict(spec, json.load(f))
2 changes: 2 additions & 0 deletions pyproject.toml
@@ -0,0 +1,2 @@
+[tool.black]
+line-length = 100
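This is the only black configuration the commit adds, and the 100-character limit explains the rewrapping throughout the diff. As a rough illustration (not part of the commit), black's Python API exposes the same setting via black.Mode in recent releases, and feeding the old upload_test_results.py call through it reproduces the change shown above:

import black

# The pre-black form of the call from .circleci/scripts/upload_test_results.py
# (continuation alignment approximated; black's output does not depend on it).
src = (
    "upload_results(args.status, args.parallel_run,\n"
    "               args.filepath, args.job_id,\n"
    "               args.access_key, args.url)\n"
)

# At line-length=100 the one-line form is 101 characters, one over the limit,
# so black explodes the call onto an indented argument line, as in the diff.
print(black.format_str(src, mode=black.Mode(line_length=100)))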
16 changes: 4 additions & 12 deletions schemas/gen.py
@@ -87,9 +87,7 @@ def gen_go_schemas_package(schemas: List[Schema]) -> List[str]:
# Global variables (lazily loaded but otherwise constants).
lines.append("var (")
# Schema texts.
-lines.extend(
-    [f"\ttext{schema.golang_title} = []byte(`{schema.text}`)" for schema in schemas]
-)
+lines.extend([f"\ttext{schema.golang_title} = []byte(`{schema.text}`)" for schema in schemas])
# Cached schema values, initially nil.
for schema in schemas:
lines.append(f"\tschema{schema.golang_title} interface{{}}")
@@ -226,9 +224,7 @@ def find_struct(file: str, gotype: str) -> Tuple[List[FieldSpec], List[UnionSpec
field_spec.append((field, type, tag))
continue

-raise AssertionError(
-    f"unsure how to handle line {lineno}: '{line.rstrip()}'"
-)
+raise AssertionError(f"unsure how to handle line {lineno}: '{line.rstrip()}'")

# We should have exited when we saw the "}" line.
raise AssertionError(f"failed to find struct definition for {gotype} in {file}")
@@ -327,9 +323,7 @@ def get_defaulted_type(schema: Schema, tag: str, type: str) -> Tuple[str, str, b
return type, default, required


-def go_getters_and_setters(
-    gotype: str, schema: Schema, spec: List[FieldSpec]
-) -> List[str]:
+def go_getters_and_setters(gotype: str, schema: Schema, spec: List[FieldSpec]) -> List[str]:
lines = [] # type: List[str]

if len(spec) < 1:
@@ -369,9 +363,7 @@ def go_getters_and_setters(
lines.append("")
lines.append(f"func ({x} {gotype}) {getter}() {defaulted_type} {{")
lines.append(f"\tif {x}.{field} == nil {{")
-lines.append(
-    f'\t\tpanic("You must call WithDefaults on {gotype} before .{getter}")'
-)
+lines.append(f'\t\tpanic("You must call WithDefaults on {gotype} before .{getter}")')
lines.append("\t}")
lines.append(f"\treturn *{x}.{field}")
lines.append("}")
17 changes: 4 additions & 13 deletions schemas/lint.py
@@ -186,15 +186,10 @@ def check_default_locations(schema: dict, path: str, ctx: LintContext) -> Errors
"default is the literal 'null' string, probable typo",
)
)
-elif (
-    not re.match("^<[^>]*>\\.[^.]*$", subpath)
-    and sub["default"] is not None
-):
+elif not re.match("^<[^>]*>\\.[^.]*$", subpath) and sub["default"] is not None:
# This is pretty valid in json-schema normally, but it makes reading defaults
# out of json-schema (which we need in multiple languages) much harder.
-errors.append(
-    (subpath + ".default", "non-null default is defined on a subobject")
-)
+errors.append((subpath + ".default", "non-null default is defined on a subobject"))

return errors

@@ -423,18 +418,14 @@ def iter_schema(
# Apply linters to this structural element.
if ctx is None:
assert filepath, "filepath must be provided when ctx is None"
-ctx = LintContext(
-    schema, path, toplevel=True, in_checks=in_checks, filepath=filepath
-)
+ctx = LintContext(schema, path, toplevel=True, in_checks=in_checks, filepath=filepath)
else:
ctx = LintContext(schema, path, False, ctx.in_checks, ctx.filepath)
for linter in linters:
try:
errors += linter(schema, path, ctx)
except Exception as e:
-raise ValueError(
-    f"error processing schema:\n{json.dumps(schema, indent=4)}"
-) from e
+raise ValueError(f"error processing schema:\n{json.dumps(schema, indent=4)}") from e

# Descend into child dicts of structural elements.
for kw in ["properties"]:
8 changes: 2 additions & 6 deletions tools/scripts/gen-attributions.py
@@ -57,9 +57,7 @@ def __init__(
assert text.strip(), "License text not found"
assert name is not None, "a Name header is required"
assert type is not None, "a Type header is required"
-assert (
-    type.lower() in known_licenses
-), f"Type header must be one of {known_licenses}"
+assert type.lower() in known_licenses, f"Type header must be one of {known_licenses}"
assert master.lower() in {"true", "false"}, "Master must be true or false"
assert agent.lower() in {"true", "false"}, "Agent must be true or false"
assert webui.lower() in {"true", "false"}, "Webui must be true or false"
@@ -230,9 +228,7 @@ def main(build_type: str, path_out: Optional[str]) -> int:
our_license_path,
)
elif sys.argv[1] == "agent":
-gen = build_ascii(
-    [license for license in licenses if license.agent], our_license_path
-)
+gen = build_ascii([license for license in licenses if license.agent], our_license_path)

gen = post_process(gen)

(Diffs for the remaining 3 of the 19 changed files are not shown.)