Skip to content

Commit

Permalink
Merge pull request #448 from mystic-ai/ph/pc-1094-tags-on-push
Browse files Browse the repository at this point in the history
PC-1094 Tags on push
  • Loading branch information
paulcjh authored Apr 16, 2024
2 parents f8b5ec4 + 8edf588 commit 5d6ebc3
Show file tree
Hide file tree
Showing 6 changed files with 169 additions and 2 deletions.
31 changes: 31 additions & 0 deletions examples/basic/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
# {pipeline_name}

## Description

This is a new pipeline created using the `pipeline-ai` CLI.

### Configuration

This pipeline uses the following `pipeline.yaml`:

{pipeline_yaml}

## Usage

_Put usage information here_

## Code

{pipeline_code}

## License

_Put license information here_

## Author

_Put author information here_

---

_This is an autogenerated README file for the pipeline `{pipeline_name}`. You can read more about README files [here on our docs](https://docs.mystic.ai/docs/using-the-readmemd)._
42 changes: 42 additions & 0 deletions examples/basic/new_pipeline.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
from pipeline import Pipeline, Variable, entity, pipe


# Put your model inside of the below entity class
@entity
class MyModelClass:
    """Template entity wrapping a model's load and predict steps.

    Replace the placeholder bodies with real model loading and inference
    code; the `@pipe` decorators expose each method as a pipeline stage.
    """

    @pipe(run_once=True, on_startup=True)
    def load(self) -> None:
        # run_once=True: executes a single time; on_startup=True: runs
        # before any predictions are served.
        # Perform any operations needed to load your model here
        print("Loading model...")

        ...

        print("Model loaded!")

    @pipe
    def predict(self, output_number: int) -> str:
        """Run inference; this template simply echoes the input number."""
        # Perform any operations needed to predict with your model here
        print("Predicting...")

        ...

        print("Prediction complete!")

        return f"Your number: {output_number}"


# Build the computational graph: wire the input variable through the
# entity's pipes and register the output.
with Pipeline() as builder:
    # User-facing input; description/title are surfaced to callers.
    input_var = Variable(
        int,
        description="A basic input number to do things with",
        title="Input number",
    )

    my_model = MyModelClass()
    my_model.load()

    output_var = my_model.predict(input_var)

    builder.output(output_var)

# Materialise the graph. `pipeline.yaml` references this object via
# `pipeline_graph: new_pipeline:my_new_pipeline`.
my_new_pipeline = builder.get_pipeline()
16 changes: 16 additions & 0 deletions examples/basic/pipeline.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# Pipeline container configuration (read by the `pipeline` CLI).
runtime:
  # Shell commands run while building the container image.
  container_commands:
    - apt-get update
    - apt-get install -y git
  python:
    version: '3.10'
    # Python packages installed into the container.
    requirements:
      - pipeline-ai
# No GPU/accelerator requested for this basic example.
accelerators: []
accelerator_memory: null
# <module>:<variable> pointing at the built pipeline object.
pipeline_graph: new_pipeline:my_new_pipeline
pipeline_name: basic
description: null
# Rendered as the pipeline's README on upload.
readme: README.md
extras: {}
cluster: null
12 changes: 12 additions & 0 deletions pipeline/console/commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,6 +147,18 @@ def container_parser(command_parser: "_SubParsersAction[ArgumentParser]") -> Non
help="Push a pipeline container.",
)
push_parser.set_defaults(func=container._push_container)
push_parser.add_argument(
"--pointer",
"-p",
action="append",
help="Pointer for the container.",
)
push_parser.add_argument(
"--pointer-overwrite",
"-o",
action="store_true",
help="Overwrite existing pointers.",
)

up_parser = container_sub_parser.add_parser(
"up",
Expand Down
66 changes: 66 additions & 0 deletions pipeline/console/container/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
from pipeline.cloud.compute_requirements import Accelerator
from pipeline.cloud.schemas import cluster as cluster_schemas
from pipeline.cloud.schemas import pipelines as pipelines_schemas
from pipeline.cloud.schemas import pointers as pointers_schemas
from pipeline.cloud.schemas import registry as registry_schemas
from pipeline.container import docker_templates
from pipeline.util.logging import _print
Expand Down Expand Up @@ -51,6 +52,64 @@ class Config:
extra = "forbid"


def _edit_pointer(
    existing_pointer: str,
    pointer_or_pipeline_id: str,
) -> None:
    """Repoint an existing pointer at a new pipeline (or pointer).

    Args:
        existing_pointer: Name of the pointer to update.
        pointer_or_pipeline_id: Target pipeline ID (or pointer name) that
            the pointer should resolve to after the update.
    """
    edit_schema = pointers_schemas.PointerPatch(
        pointer_or_pipeline_id=pointer_or_pipeline_id,
        locked=False,
    )

    # Round-trip through the schema's json() so http.patch receives a
    # plain dict with all fields serialised consistently.
    result = http.patch(
        f"/v4/pointers/{existing_pointer}",
        json.loads(
            edit_schema.json(),
        ),
    )

    if result.status_code == 200:
        pointer = pointers_schemas.PointerGet.parse_obj(result.json())
        _print(f"Updated pointer {pointer.pointer} -> {pointer.pipeline_id}")
    else:
        # Surface the response body so failures are actionable, matching
        # the error detail provided by _create_pointer.
        _print(
            f"Failed to edit pointer {existing_pointer}\n{result.text}",
            "ERROR",
        )


def _create_pointer(
    new_pointer: str,
    pointer_or_pipeline_id: str,
    force=False,
) -> None:
    """Create a pointer that resolves to the given pipeline (or pointer).

    On a 409 conflict the pointer already exists: it is updated in place
    when ``force`` is true, otherwise a warning is printed and nothing
    changes. Any other non-201 response raises ``ValueError``.
    """
    payload = json.loads(
        pointers_schemas.PointerCreate(
            pointer=new_pointer,
            pointer_or_pipeline_id=pointer_or_pipeline_id,
            locked=False,
        ).json()
    )
    result = http.post("/v4/pointers", payload, handle_error=False)

    status = result.status_code
    if status == 201:
        pointer = pointers_schemas.PointerGet.parse_obj(result.json())
        _print(f"Created pointer {pointer.pointer} -> {pointer.pipeline_id}")
        return

    if status != 409:
        raise ValueError(f"Failed to create pointer {new_pointer}\n{result.text}")

    # 409: a pointer with this name already exists.
    if force:
        _print("Pointer already exists, forcing update", "WARNING")
        _edit_pointer(new_pointer, pointer_or_pipeline_id)
    else:
        _print(
            f"Pointer {new_pointer} already exists, use --pointer-overwrite to update",  # noqa
            "WARNING",
        )


def _up_container(namespace: Namespace):
_print("Starting container...", "INFO")
config_file = Path("./pipeline.yaml")
Expand Down Expand Up @@ -284,6 +343,8 @@ def _push_container(namespace: Namespace):
config = config_file.read_text()
pipeline_config_yaml = yaml.load(config, Loader=yaml.FullLoader)

pointers: list | None = getattr(namespace, "pointer", None)

pipeline_config = PipelineConfig.parse_obj(pipeline_config_yaml)

# Check for file, transform to string, and put it back in config
Expand Down Expand Up @@ -452,6 +513,11 @@ def _push_container(namespace: Namespace):
"SUCCESS",
)

if pointers:
pointer_overwrite = getattr(namespace, "pointer_overwrite", False)
for pointer in pointers:
_create_pointer(pointer, new_deployment.id, force=pointer_overwrite)


def _init_dir(namespace: Namespace) -> None:
_print("Initializing directory...", "INFO")
Expand Down
4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "pipeline-ai"
version = "2.1.5"
version = "2.1.6"
description = "Pipelines for machine learning workloads."
authors = [
"Paul Hetherington <ph@mystic.ai>",
Expand Down Expand Up @@ -65,4 +65,4 @@ profile = "black"
pipeline = "pipeline.console:_run"

[tool.pytest.ini_options]
asyncio_mode = "auto"
asyncio_mode = "auto"

0 comments on commit 5d6ebc3

Please sign in to comment.