Feat: make it Python 3.8/3.9 compatible (#41)
* feat: typing back to 3.8

* doc: update readme with new python versions

* fix: update ci with new python versions
julesbertrand committed Sep 27, 2023
1 parent 9500a03 commit 4e50c99
Showing 9 changed files with 45 additions and 41 deletions.
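All nine files follow the same pattern: Python 3.8 cannot subscript built-in generics such as `list[str]` (PEP 585, added in 3.9), and neither 3.8 nor 3.9 accepts `X | None` unions (PEP 604, added in 3.10), so annotations are rewritten with their `typing` equivalents (`List`, `Dict`, `Tuple`, `Union`, `Optional`) and `Annotated` is imported from `typing_extensions`. A minimal sketch of the substitution, using a hypothetical function rather than code from this repository:

```python
from typing import Dict, List, Optional

# Python >= 3.10 only:
#     def describe(tags: list[str], region: str | None = None) -> dict[str, str]: ...
# Python 3.8-compatible spelling, as used throughout this commit:
def describe(tags: List[str], region: Optional[str] = None) -> Dict[str, str]:
    """Hypothetical example of the 3.8-friendly annotations."""
    return {"tags": ",".join(tags), "region": region or "unset"}
```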
4 changes: 2 additions & 2 deletions .github/workflows/ci.yaml
@@ -8,7 +8,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['3.10']
python-version: ['3.8', '3.9', '3.10']

steps:
- uses: actions/checkout@v2
@@ -47,7 +47,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['3.10']
python-version: ['3.8', '3.9', '3.10']

steps:
- uses: actions/checkout@v2
4 changes: 2 additions & 2 deletions README.md
@@ -11,7 +11,7 @@
<!-- PROJECT SHIELDS -->
<div align="center">

[![Python Version](https://img.shields.io/badge/Python-3.10-informational.svg)](#supported-python-versions)
[![Python Version](https://img.shields.io/badge/Python-3.8_3.9_3.10-blue?logo=python)](#supported-python-versions)
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
[![Imports: isort](https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336)](https://pycqa.github.io/isort/)
[![Linting: ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
@@ -56,7 +56,7 @@ Commands:
## Prerequisites

- Unix-like environment (Linux, macOS, WSL, etc...)
- Python 3.10
- Python 3.8 to 3.10
- Google Cloud SDK
- A GCP project with Vertex Pipelines enabled

3 changes: 2 additions & 1 deletion deployer/cli.py
@@ -1,5 +1,6 @@
import sys
from pathlib import Path
from typing import List

import typer
from loguru import logger
@@ -88,7 +89,7 @@ def deploy(
),
] = False,
tags: Annotated[
list[str], typer.Option(help="The tags to use when uploading the pipeline.")
List[str], typer.Option(help="The tags to use when uploading the pipeline.")
] = DEFAULT_TAGS,
config_filepath: Annotated[
Path,
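For context, a hedged sketch of how a `List[str]` option behaves under typer on Python 3.8; the command body, help text, and default are illustrative and not the repo's full `deploy` command:

```python
from typing import List

import typer
from typing_extensions import Annotated

app = typer.Typer()


@app.command()
def deploy(
    tags: Annotated[
        List[str], typer.Option(help="The tags to use when uploading the pipeline.")
    ] = ["latest"],
) -> None:
    # Repeating the option appends values: `deploy --tags v1 --tags latest`.
    typer.echo(f"tags={tags}")


if __name__ == "__main__":
    app()
```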
9 changes: 5 additions & 4 deletions deployer/pipeline_checks.py
@@ -1,9 +1,10 @@
import shutil
from pathlib import Path
from typing import Annotated, Any, Generic, TypeVar
from typing import Any, Dict, Generic, List, TypeVar

from loguru import logger
from pydantic import Field, computed_field, model_validator
from typing_extensions import Annotated

from deployer.constants import (
CONFIG_ROOT_PATH,
@@ -27,14 +28,14 @@
class DynamicConfigsModel(CustomBaseModel, Generic[PipelineConfigT]):
"""Model used to generate checks for configs based on pipeline dynamic model"""

configs: dict[str, PipelineConfigT]
configs: Dict[str, PipelineConfigT]


class Pipeline(CustomBaseModel):
"""Validation of one pipeline and its configs"""

pipeline_name: PipelineName
config_paths: Annotated[list[Path], Field(validate_default=True)] = None
config_paths: Annotated[List[Path], Field(validate_default=True)] = None

@model_validator(mode="before")
@classmethod
@@ -97,7 +98,7 @@ def validate_configs(self):
class Pipelines(CustomBaseModel):
"""Model to validate multiple pipelines at once"""

pipelines: dict[str, Pipeline]
pipelines: Dict[str, Pipeline]

@model_validator(mode="before")
@classmethod
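`Annotated` only joined the standard `typing` module in Python 3.9, hence the switch to `typing_extensions` here. A simplified sketch of the pattern, with stand-in classes that omit the repo's `CustomBaseModel` and validators:

```python
from typing import Dict, Generic, List, TypeVar

from pydantic import BaseModel, Field
from typing_extensions import Annotated

PipelineConfigT = TypeVar("PipelineConfigT")


class DynamicConfigsModel(BaseModel, Generic[PipelineConfigT]):
    """Generic container keyed by config name, parametrized via typing.Dict."""

    configs: Dict[str, PipelineConfigT]


class Pipeline(BaseModel):
    """Annotated + Field keeps pydantic metadata while staying 3.8-compatible."""

    pipeline_name: str
    config_paths: Annotated[List[str], Field(default_factory=list)]


print(DynamicConfigsModel[int](configs={"dev": 1}))
```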
44 changes: 22 additions & 22 deletions deployer/pipeline_deployer.py
@@ -1,6 +1,6 @@
import os
from pathlib import Path
from typing import Callable
from typing import Callable, List, Optional

from google.cloud import aiplatform
from google.cloud.aiplatform import PipelineJobSchedule
@@ -23,12 +23,12 @@ def __init__(
self,
pipeline_name: str,
pipeline_func: Callable,
project_id: str | None = None,
region: str | None = None,
staging_bucket_name: str | None = None,
service_account: str | None = None,
gar_location: str | None = None,
gar_repo_id: str | None = None,
project_id: Optional[str] = None,
region: Optional[str] = None,
staging_bucket_name: Optional[str] = None,
service_account: Optional[str] = None,
gar_location: Optional[str] = None,
gar_repo_id: Optional[str] = None,
local_package_path: Path = DEFAULT_LOCAL_PACKAGE_PATH,
) -> None:
"""I don't want to write a dostring here but ruff wants me to"""
@@ -53,7 +53,7 @@ def __init__(
)

@property
def gar_host(self) -> str | None:
def gar_host(self) -> Optional[str]:
"""Return the Artifact Registry host if the location and repo ID are provided"""
if self.gar_location is not None and self.gar_repo_id is not None:
return os.path.join(
@@ -68,7 +68,7 @@ def gar_host(self) -> str | None:
def staging_bucket_uri(self) -> str: # noqa: D102
return f"gs://{self.staging_bucket_name}/root"

def _get_template_path(self, tag: str | None = None) -> str:
def _get_template_path(self, tag: Optional[str] = None) -> str:
"""Return the path to the pipeline template
If the Artifact Registry host is provided, return the path to the pipeline template in
@@ -96,7 +96,7 @@ def _check_gar_host(self) -> None:
"Please provide gar_location and gar_repo_id."
)

def _check_experiment_name(self, experiment_name: str | None = None) -> str:
def _check_experiment_name(self, experiment_name: Optional[str] = None) -> str:
if experiment_name is None:
experiment_name = f"{self.pipeline_name}-experiment"
logger.info(f"Experiment name not provided, using {experiment_name}")
@@ -109,8 +109,8 @@ def _create_pipeline_job(
self,
template_path: str,
enable_caching: bool = False,
parameter_values: dict | None = None,
input_artifacts: dict | None = None,
parameter_values: Optional[dict] = None,
input_artifacts: Optional[dict] = None,
) -> aiplatform.PipelineJob:
job = aiplatform.PipelineJob(
display_name=self.pipeline_name,
@@ -138,7 +138,7 @@ def compile(self) -> "VertexPipelineDeployer":

def upload_to_registry(
self,
tags: list[str] = ["latest"], # noqa: B006
tags: List[str] = ["latest"], # noqa: B006
) -> "VertexPipelineDeployer":
"""Upload pipeline to Artifact Registry"""
self._check_gar_host()
@@ -155,10 +155,10 @@ def upload_to_registry(
def run(
self,
enable_caching: bool = False,
parameter_values: dict | None = None,
input_artifacts: dict | None = None,
experiment_name: str | None = None,
tag: str | None = None,
parameter_values: Optional[dict] = None,
input_artifacts: Optional[dict] = None,
experiment_name: Optional[str] = None,
tag: Optional[str] = None,
) -> "VertexPipelineDeployer":
"""Run pipeline on Vertex AI Pipelines
@@ -194,9 +194,9 @@ def run(
def compile_upload_run(
self,
enable_caching: bool = False,
parameter_values: dict | None = None,
experiment_name: str | None = None,
tags: list[str] = ["latest"], # noqa: B006
parameter_values: Optional[dict] = None,
experiment_name: Optional[str] = None,
tags: List[str] = ["latest"], # noqa: B006
) -> "VertexPipelineDeployer":
"""Compile, upload and run pipeline on Vertex AI Pipelines"""
self.compile()
@@ -216,8 +216,8 @@ def schedule(
self,
cron: str,
enable_caching: bool = False,
parameter_values: dict | None = None,
tag: str | None = None,
parameter_values: Optional[dict] = None,
tag: Optional[str] = None,
delete_last_schedule: bool = False,
) -> "VertexPipelineDeployer":
"""Create pipeline schedule on Vertex AI Pipelines
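`Optional[str]` means exactly `Union[str, None]`, the pre-3.10 way to write `str | None`, and only the former parses on 3.8 and 3.9. A stripped-down stand-in for the deployer under that assumption (the host string built below is an illustrative join, not the real Artifact Registry URL logic):

```python
from typing import List, Optional


class DeployerStub:
    def __init__(
        self,
        pipeline_name: str,
        gar_location: Optional[str] = None,
        gar_repo_id: Optional[str] = None,
    ) -> None:
        self.pipeline_name = pipeline_name
        self.gar_location = gar_location
        self.gar_repo_id = gar_repo_id
        self.tags: List[str] = []

    @property
    def gar_host(self) -> Optional[str]:
        """Return a registry host only when both parts are provided."""
        if self.gar_location is not None and self.gar_repo_id is not None:
            return f"{self.gar_location}/{self.gar_repo_id}"  # illustrative join
        return None

    def upload_to_registry(self, tags: Optional[List[str]] = None) -> "DeployerStub":
        # None plus a fallback sidesteps the mutable default the real code marks with noqa: B006.
        self.tags = tags or ["latest"]
        return self
```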
15 changes: 8 additions & 7 deletions deployer/utils/config.py
@@ -2,6 +2,7 @@
import json
from enum import Enum
from pathlib import Path
from typing import List, Optional, Tuple, Union

from pydantic import ValidationError
from pydantic_settings import BaseSettings, SettingsConfigDict
@@ -20,7 +21,7 @@ class VertexPipelinesSettings(BaseSettings): # noqa: D101
VERTEX_SERVICE_ACCOUNT: str


def load_vertex_settings(env_file: Path | None = None) -> VertexPipelinesSettings:
def load_vertex_settings(env_file: Optional[Path] = None) -> VertexPipelinesSettings:
"""Load the settings from the environment."""
try:
settings = VertexPipelinesSettings(_env_file=env_file, _env_file_encoding="utf-8")
@@ -40,15 +41,15 @@ class ConfigType(str, Enum): # noqa: D101
py = "py"


def list_config_filepaths(config_root_path: Path | str, pipeline_name: str) -> list[Path]:
def list_config_filepaths(config_root_path: Union[Path, str], pipeline_name: str) -> List[Path]:
"""List the config filepaths for a pipeline.
Args:
config_root_path (Path): A `Path` object representing the root path of the configs.
pipeline_name (str): The name of the pipeline.
Returns:
list[Path]: A list of `Path` objects representing the config filepaths.
List[Path]: A list of `Path` objects representing the config filepaths.
"""
configs_dirpath = Path(config_root_path) / pipeline_name
config_filepaths = [
@@ -59,7 +60,7 @@ def list_config_filepaths(config_root_path: Path | str, pipeline_name: str) -> l
return config_filepaths


def load_config(config_filepath: Path) -> tuple[dict | None, dict | None]:
def load_config(config_filepath: Path) -> Tuple[Optional[dict], Optional[dict]]:
"""Load the parameter values and input artifacts from a config file.
Config file can be a JSON or Python file.
@@ -71,7 +72,7 @@ def load_config(config_filepath: Path) -> tuple[dict | None, dict | None]:
config_filepath (Path): A `Path` object representing the path to the config file.
Returns:
tuple[dict | None, dict | None]: A tuple containing the loaded parameter values
Tuple[Optional[dict], Optional[dict]]:: A tuple containing the loaded parameter values
and input artifacts (or `None` if not available).
Raises:
@@ -94,14 +95,14 @@ def load_config(config_filepath: Path) -> tuple[dict | None, dict | None]:
)


def _load_config_python(config_filepath: Path) -> tuple[dict | None, dict | None]:
def _load_config_python(config_filepath: Path) -> Tuple[Optional[dict], Optional[dict]]:
"""Load the parameter values and input artifacts from a Python config file.
Args:
config_filepath (Path): A `Path` object representing the path to the config file.
Returns:
tuple[dict | None, dict | None]: A tuple containing the loaded parameter values
Tuple[Optional[dict], Optional[dict]]:: A tuple containing the loaded parameter values
(or `None` if not available) and input artifacts (or `None` if not available).
Raises:
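The same substitution on function signatures: `Union[Path, str]`, `Tuple[...]`, and `Optional[...]` replace the 3.10-only `Path | str` and `tuple[dict | None, dict | None]`. A hypothetical JSON-only helper in that style (not the repo's `load_config`, which also handles Python config files):

```python
import json
from pathlib import Path
from typing import Optional, Tuple, Union


def load_json_config(config_filepath: Union[Path, str]) -> Tuple[Optional[dict], Optional[dict]]:
    """Return (parameter_values, input_artifacts); either may be None if absent."""
    data = json.loads(Path(config_filepath).read_text())
    return data.get("parameter_values"), data.get("input_artifacts")
```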
2 changes: 1 addition & 1 deletion deployer/utils/models.py
@@ -1,8 +1,8 @@
from inspect import signature
from typing import _AnnotatedAlias

import kfp.components.graph_component
from pydantic import BaseModel, ConfigDict, create_model
from typing_extensions import _AnnotatedAlias


class CustomBaseModel(BaseModel):
3 changes: 2 additions & 1 deletion deployer/utils/utils.py
@@ -1,6 +1,7 @@
import importlib
from enum import Enum
from pathlib import Path
from typing import Optional

from kfp.components import graph_component
from loguru import logger
@@ -34,7 +35,7 @@ def import_pipeline_from_dir(dirpath: Path, pipeline_name: str) -> graph_compone
) from e

try:
pipeline: graph_component.GraphComponent | None = pipeline_module.pipeline
pipeline: Optional[graph_component.GraphComponent] = pipeline_module.pipeline
except AttributeError as e:
raise ImportError(
f"Pipeline {module_path}:pipeline not found. "
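The same `Optional` rewrite applied to a dynamic import, sketched without the repo's error handling; the module path and the `pipeline` attribute name are assumptions:

```python
import importlib
from typing import Any, Optional


def import_pipeline(module_path: str) -> Optional[Any]:
    """Import `module_path` and return its `pipeline` attribute, or None if it has none."""
    module = importlib.import_module(module_path)
    return getattr(module, "pipeline", None)
```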
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -10,7 +10,7 @@ packages = [{include = "deployer"}]
vertex-deployer = "deployer.cli:app"

[tool.poetry.dependencies]
python = ">=3.10,<3.11.0"
python = ">=3.8, <3.11.0"
kfp = ">=2.0.1, <2.1.0"
google-cloud-aiplatform = "^1.26.1"
requests = "^2.31.0"
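Not from the commit, but a quick guard against the new interpreter constraint (`>=3.8, <3.11.0`), should one want to fail fast at import time:

```python
import sys

if not ((3, 8) <= sys.version_info[:2] <= (3, 10)):
    raise RuntimeError(
        f"Python {sys.version_info.major}.{sys.version_info.minor} is unsupported; need 3.8-3.10."
    )
```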
