diff --git a/pipeline/configuration/__init__.py b/pipeline/configuration/__init__.py
index 6e4b7212..e8142cd4 100644
--- a/pipeline/configuration/__init__.py
+++ b/pipeline/configuration/__init__.py
@@ -10,9 +10,11 @@
 PIPELINE_DIR = Path(
     os.getenv(
         "PIPELINE_DIR",
-        Path(os.getenv("LOCALAPPDATA")) / ".pipeline/"
-        if (sys.platform == "win32" or sys.platform == "cygwin")
-        else Path.home() / ".pipeline/",
+        (
+            Path(os.getenv("LOCALAPPDATA")) / ".pipeline/"
+            if (sys.platform == "win32" or sys.platform == "cygwin")
+            else Path.home() / ".pipeline/"
+        ),
     )
 )
 
diff --git a/pipeline/console/cluster.py b/pipeline/console/cluster.py
index ae1e1b31..426b00ee 100644
--- a/pipeline/console/cluster.py
+++ b/pipeline/console/cluster.py
@@ -116,9 +116,11 @@ def _get(namespace: Namespace) -> None:
         return
 
     remotes = [
-        f"{_remote} (active)"
-        if _remote is current_configuration.active_remote
-        else f"{_remote}"
+        (
+            f"{_remote} (active)"
+            if _remote is current_configuration.active_remote
+            else f"{_remote}"
+        )
         for _remote in current_configuration.remotes
     ]
 
diff --git a/pipeline/console/container/build.py b/pipeline/console/container/build.py
index 15497700..c664faae 100644
--- a/pipeline/console/container/build.py
+++ b/pipeline/console/container/build.py
@@ -61,27 +61,28 @@ def build_container(namespace: Namespace):
         dockerfile_path.write_text(dockerfile_str)
     else:
         dockerfile_path = Path(dockerfile_path)
+
     docker_client = docker.APIClient()
-    generator = docker_client.build(
-        path="./",
-        dockerfile=dockerfile_path.absolute(),
-        rm=True,
-        decode=True,
-        platform="linux/amd64",
-    )
-    docker_image_id = None
-    while True:
-        try:
-            output = generator.__next__()
-            if "aux" in output:
-                docker_image_id = output["aux"]["ID"]
-            if "stream" in output:
-                _print(output["stream"].strip("\n"))
-            if "errorDetail" in output:
-                raise Exception(output["errorDetail"])
-        except StopIteration:
-            _print("Docker image build complete.")
-            break
+    with dockerfile_path.open("rb") as dockerfile_obj:
+        generator = docker_client.build(
+            fileobj=dockerfile_obj,
+            rm=True,
+            decode=True,
+            platform="linux/amd64",
+        )
+        docker_image_id = None
+        while True:
+            try:
+                output = generator.__next__()
+                if "aux" in output:
+                    docker_image_id = output["aux"]["ID"]
+                if "stream" in output:
+                    _print(output["stream"].strip("\n"))
+                if "errorDetail" in output:
+                    raise Exception(output["errorDetail"])
+            except StopIteration:
+                _print("Docker image build complete.")
+                break
 
     docker_client = docker.from_env()
     new_container = docker_client.images.get(docker_image_id)
diff --git a/pipeline/console/targets/resources.py b/pipeline/console/targets/resources.py
index 32acfef8..71cfe90a 100644
--- a/pipeline/console/targets/resources.py
+++ b/pipeline/console/targets/resources.py
@@ -45,24 +45,28 @@ def list_resources() -> None:
                 for cached_pipelines in resource["pipeline_cache"].values()
                 for p_id in cached_pipelines
             ],
-            _shorten_id(str(resource["current_run"]))
-            if (resource["busy"] == 1 and resource["current_run"] != -1)
-            else "-",
+            (
+                _shorten_id(str(resource["current_run"]))
+                if (resource["busy"] == 1 and resource["current_run"] != -1)
+                else "-"
+            ),
             [_shorten_id(id) for id in resource["run_queue"]],
-            "cpu"
-            if not (accelerators := resource.get("gpus", None))
-            else (
+            (
                 "cpu"
-                if "cpu" in accelerators
-                else "\n".join(
-                    [
-                        f"{[accel['name'] for accel in accelerators].count(accelerator)}× {Accelerator.from_str(accelerator)} ({round(sum([accel['vram_total_mb'] for accel in accelerators if accel['name'] == accelerator]) / 1024.0, 1)}GB VRAM)" # noqa E501
-                        for accelerator in set(
-                            [accel["name"] for accel in accelerators]
-                        )
-                    ]
+                if not (accelerators := resource.get("gpus", None))
+                else (
+                    "cpu"
+                    if "cpu" in accelerators
+                    else "\n".join(
+                        [
+                            f"{[accel['name'] for accel in accelerators].count(accelerator)}× {Accelerator.from_str(accelerator)} ({round(sum([accel['vram_total_mb'] for accel in accelerators if accel['name'] == accelerator]) / 1024.0, 1)}GB VRAM)" # noqa E501
+                            for accelerator in set(
+                                [accel["name"] for accel in accelerators]
+                            )
+                        ]
+                    )
                 )
-            )
+            ),
             # "N/A"
             # if resource["gpus"] is None
             # else [gpu["name"].strip() for gpu in resource["gpus"]],
diff --git a/pipeline/console/targets/scaling_configs.py b/pipeline/console/targets/scaling_configs.py
index 698a4cc9..f5659bd8 100644
--- a/pipeline/console/targets/scaling_configs.py
+++ b/pipeline/console/targets/scaling_configs.py
@@ -59,9 +59,11 @@ def _get_scaling_config(args: Namespace) -> None:
             [
                 scaling["id"],
                 scaling["name"],
-                datetime.fromtimestamp(scaling.get("created_at"))
-                if "created_at" in scaling
-                else "N/A",
+                (
+                    datetime.fromtimestamp(scaling.get("created_at"))
+                    if "created_at" in scaling
+                    else "N/A"
+                ),
                 scaling["type"],
                 scaling["args"],
             ]
diff --git a/pyproject.toml b/pyproject.toml
index ebe3cc29..3b2f8be9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "pipeline-ai"
-version = "2.4.2"
+version = "2.4.3"
 description = "Pipelines for machine learning workloads."
 authors = [
     "Paul Hetherington <ph@mystic.ai>",