19 changes: 18 additions & 1 deletion instill/resources/__init__.py
@@ -1,5 +1,22 @@
import instill.protogen.model.model.v1alpha.model_pb2 as model_pb
import instill.protogen.model.model.v1alpha.task_classification_pb2 as task_classification
import instill.protogen.model.model.v1alpha.task_detection_pb2 as task_detection
import instill.protogen.model.model.v1alpha.task_keypoint_pb2 as task_keypoint
import instill.protogen.model.model.v1alpha.task_ocr_pb2 as task_ocr
import instill.protogen.model.model.v1alpha.task_semantic_segmentation_pb2 as task_semantic_segmentation
import instill.protogen.model.model.v1alpha.task_text_generation_pb2 as task_text_generation
import instill.protogen.model.model.v1alpha.task_text_to_image_pb2 as task_text_to_image
import instill.protogen.vdp.connector.v1alpha.connector_pb2 as connector_pb
import instill.protogen.vdp.pipeline.v1alpha.pipeline_pb2 as pipeline_pb
from instill.resources.connector import Connector
from instill.resources.connector_ai import OpenAIConnector, StabilityAIConnector
from instill.resources.connector_ai import (
InstillModelConnector,
OpenAIConnector,
StabilityAIConnector,
)
from instill.resources.connector_blockchain import NumbersConnector
from instill.resources.connector_data import PineconeConnector
from instill.resources.model import GithubModel, HugginfaceModel, Model
from instill.resources.operator import create_end_operator, create_start_operator
from instill.resources.pipeline import Pipeline
from instill.resources.recipe import create_recipe
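
The re-exports above flatten the package's public surface, so the protobuf task modules and the new helper functions can be imported straight from instill.resources. A minimal sketch of the new import surface; every name comes from the __init__.py diff above:

# Sketch only: importing a subset of the names re-exported in this diff.
from instill.resources import (
    Connector,
    InstillModelConnector,
    Pipeline,
    create_start_operator,
    create_end_operator,
    create_recipe,
    model_pb,
    task_detection,
)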
11 changes: 10 additions & 1 deletion instill/resources/connector.py
@@ -1,6 +1,7 @@
# pylint: disable=no-member,wrong-import-position,no-name-in-module
import instill.protogen.vdp.connector.v1alpha.connector_definition_pb2 as connector_definition_interface
import instill.protogen.vdp.connector.v1alpha.connector_pb2 as connector_interface
import instill.protogen.vdp.pipeline.v1alpha.pipeline_pb2 as pipeline_interface
from instill.clients import InstillClient
from instill.resources.resource import Resource

@@ -15,7 +16,7 @@ def __init__(
) -> None:
super().__init__()
self.client = client
connector = client.connector_service.get_connector(name=name)
connector = client.connector_service.get_connector(name=name, silent=True)
if connector is None:
connector = client.connector_service.create_connector(
name=name,
@@ -56,6 +57,14 @@ def resource(self):
def resource(self, resource: connector_interface.ConnectorResource):
self._resource = resource

def create_component(self, name: str, config: dict) -> pipeline_interface.Component:
component = pipeline_interface.Component()
component.id = name
component.definition_name = self.get_definition().name
component.resource_name = self.resource.name
component.configuration.update(config)
return component

def get_definition(self) -> connector_definition_interface.ConnectorDefinition:
return self.resource.connector_definition

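
The new create_component helper builds a pipeline Component directly from an existing connector, filling in id, definition_name, resource_name and the configuration Struct in one call. A sketch of the intended call site, mirroring the updated integration test further down; the csv_connector instance and its config keys come from that test and are not defined here:

# Sketch only: csv_connector is assumed to be an existing Connector resource
# (see the integration test below for how it is constructed).
csv_connector_component = csv_connector.create_component(
    name="d01",
    config={"input": {"text": "{ start.input }"}},
)
# This replaces the old pattern of instantiating pipeline_interface.Component()
# and setting id / definition_name / resource_name / configuration by hand.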
2 changes: 1 addition & 1 deletion instill/resources/model.py
@@ -15,7 +15,7 @@ def __init__(
) -> None:
super().__init__()
self.client = client
model = client.model_serevice.get_model(model_name=name)
model = client.model_serevice.get_model(model_name=name, silent=True)
if model is None:
model = client.model_serevice.create_model(
name=name,
20 changes: 20 additions & 0 deletions instill/resources/operator.py
@@ -0,0 +1,20 @@
# pylint: disable=no-member,wrong-import-position
import instill.protogen.vdp.pipeline.v1alpha.pipeline_pb2 as pipeline_pb


def create_start_operator(config: dict) -> pipeline_pb.Component:
start_operator_component = pipeline_pb.Component()
start_operator_component.id = "start"
start_operator_component.definition_name = "operator-definitions/start-operator"
start_operator_component.configuration.update(config)

return start_operator_component


def create_end_operator(config: dict) -> pipeline_pb.Component:
end_operator_component = pipeline_pb.Component()
end_operator_component.id = "end"
end_operator_component.definition_name = "operator-definitions/end-operator"
end_operator_component.configuration.update(config)

return end_operator_component
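
These two factories replace hand-built start and end Component objects. A usage sketch, following the configuration shapes used in the csv pipeline section of the integration test below:

from instill.resources import create_start_operator, create_end_operator

# Start operator: declares the pipeline's input fields.
start_operator_component = create_start_operator(
    config={"metadata": {"input": {"title": "Input", "type": "text"}}}
)

# End operator: declares the outputs and wires them to upstream components.
end_operator_component = create_end_operator(
    config={
        "metadata": {"answer": {"title": "Answer"}},
        "input": {"answer": "{ d01.input }"},
    }
)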
8 changes: 6 additions & 2 deletions instill/resources/pipeline.py
@@ -1,4 +1,6 @@
# pylint: disable=no-member,wrong-import-position,no-name-in-module
from typing import Tuple

import instill.protogen.vdp.pipeline.v1alpha.pipeline_pb2 as pipeline_interface
from instill.clients import InstillClient
from instill.resources.resource import Resource
@@ -13,7 +15,7 @@ def __init__(
) -> None:
super().__init__()
self.client = client
pipeline = client.pipeline_service.get_pipeline(name=name)
pipeline = client.pipeline_service.get_pipeline(name=name, silent=True)
if pipeline is None:
pipeline = client.pipeline_service.create_pipeline(name=name, recipe=recipe)
if pipeline is None:
@@ -25,7 +27,9 @@ def __del__(self):
if self.resource is not None:
self.client.pipeline_service.delete_pipeline(self.resource.id)

def __call__(self, task_inputs: list) -> list:
def __call__(
self, task_inputs: list
) -> Tuple[list, pipeline_interface.TriggerMetadata]:
return self.client.pipeline_service.trigger_pipeline(
self.resource.id, task_inputs
)
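
Pipeline.__call__ now returns an (outputs, metadata) tuple rather than a bare list, so trigger call sites also receive the TriggerMetadata. A hedged sketch of the updated call pattern; the Struct input construction is collapsed in the diff below, so the input field name here is an assumption based on the start-operator config:

from google.protobuf.struct_pb2 import Struct

# csv_pipeline is assumed to be the Pipeline built in the integration test.
i = Struct()
i.update({"input": "instill-ai rocks"})  # assumed input field name

outputs, metadata = csv_pipeline([i])  # previously this returned only outputs
print(outputs[0]["answer"]["text"], metadata)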
10 changes: 10 additions & 0 deletions instill/resources/recipe.py
@@ -0,0 +1,10 @@
# pylint: disable=no-member,wrong-import-position
import instill.protogen.vdp.pipeline.v1alpha.pipeline_pb2 as pipeline_pb


def create_recipe(component: list) -> pipeline_pb.Recipe:
recipe = pipeline_pb.Recipe()
recipe.version = "v1alpha"
recipe.components.extend(component)

return recipe
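
create_recipe wraps a component list into a Recipe pinned to the v1alpha version. A short sketch combining it with the operator and connector components from the sketches above:

from instill.resources import create_recipe

# Component order follows the csv pipeline in the integration test below.
recipe = create_recipe(
    [start_operator_component, end_operator_component, csv_connector_component]
)
assert recipe.version == "v1alpha"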
7 changes: 5 additions & 2 deletions instill/utils/error_handler.py
@@ -13,13 +13,16 @@ def __str__(self) -> str:

def grpc_handler(func):
def func_wrapper(*args, **kwargs):
silent = kwargs.pop("silent", False)
try:
if not args[0].is_serving():
raise NotServingException
return func(*args, **kwargs)
except grpc.RpcError as rpc_error:
Logger.w(rpc_error.code())
Logger.w(rpc_error.details())
if not silent:
Logger.w(rpc_error.code())
Logger.w(rpc_error.details())
os._exit(1)
except Exception as e:
Logger.exception(e)
os._exit(1)
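
grpc_handler now pops an optional silent keyword before invoking the wrapped service call; when silent=True the gRPC error code and details are not logged. Combined with the silent=True lookups added in connector.py, model.py and pipeline.py above, this enables a quiet get-or-create pattern, sketched here; the client and recipe objects are assumed to exist already:

# Hedged sketch of the get-or-create pattern the `silent` kwarg supports;
# it mirrors what the Pipeline constructor now does.
pipeline = client.pipeline_service.get_pipeline(name="csv-pipeline", silent=True)
if pipeline is None:
    # Lookup failed quietly (no warning logs); create the resource instead.
    pipeline = client.pipeline_service.create_pipeline(
        name="csv-pipeline", recipe=recipe
    )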
132 changes: 60 additions & 72 deletions janky-integration-test.py → integration-test.py
@@ -1,18 +1,24 @@
# pylint: disable=no-member,no-name-in-module
import requests
import base64
import requests

from google.protobuf.struct_pb2 import Struct
from instill.clients import get_client
from instill.resources.model import GithubModel

from instill.resources.connector_ai import InstillModelConnector
from instill.resources.connector import Connector
from instill.resources.pipeline import Pipeline
import instill.protogen.model.model.v1alpha.model_pb2 as model_interface
import instill.protogen.vdp.connector.v1alpha.connector_pb2 as connector_interface
import instill.protogen.vdp.pipeline.v1alpha.pipeline_pb2 as pipeline_interface
import instill.protogen.model.model.v1alpha.task_classification_pb2 as classification
from instill.resources import (
Connector,
InstillModelConnector,
Pipeline,
model_pb,
connector_pb,
pipeline_pb,
task_detection,
create_start_operator,
create_end_operator,
create_recipe,
)

from instill.utils.logger import Logger

local_model = {
@@ -29,15 +35,17 @@
try:
client = get_client()

# ======================== mgmt
# ================================================================================================================
# ===================================================== mgmt =====================================================
assert client.mgmt_service.is_serving()
Logger.i("mgmt client created, assert status == serving: True")

user = client.mgmt_service.get_user()
assert user.id == "admin"
Logger.i("mgmt get user, assert default user id == admin: True")

# ======================== model
# ================================================================================================================
# ===================================================== model ====================================================
assert client.model_serevice.is_serving()
Logger.i("model client created, assert status == serving: True")

@@ -48,26 +56,26 @@
model_tag=github_model["model_tag"],
)

assert model.get_state() == model_interface.Model.STATE_OFFLINE
assert model.get_state() == model_pb.Model.STATE_OFFLINE
Logger.i("model created, assert STATE_OFFLINE: True")

model.deploy()
assert model.get_state() == model_interface.Model.STATE_ONLINE
assert model.get_state() == model_pb.Model.STATE_ONLINE
Logger.i("model deployed, assert STATE_ONLINE: True")

task_inputs = [
model_interface.TaskInput(
classification=classification.ClassificationInput(
model_pb.TaskInput(
detection=task_detection.DetectionInput(
image_url="https://artifacts.instill.tech/imgs/dog.jpg"
)
),
model_interface.TaskInput(
classification=classification.ClassificationInput(
model_pb.TaskInput(
detection=task_detection.DetectionInput(
image_url="https://artifacts.instill.tech/imgs/bear.jpg"
)
),
model_interface.TaskInput(
classification=classification.ClassificationInput(
model_pb.TaskInput(
detection=task_detection.DetectionInput(
image_url="https://artifacts.instill.tech/imgs/polar-bear.jpg"
)
),
@@ -83,7 +91,8 @@
assert outputs[2].detection.objects[0].category == "bear"
Logger.i("inference done, assert output 2 category == bear: True")

# ======================== connector
# ================================================================================================================
# ================================================== connector ===================================================
assert client.connector_service.is_serving()
Logger.i("connector client created, assert status == serving: True")

@@ -94,16 +103,13 @@
)
assert (
instill_connector.get_state()
== connector_interface.ConnectorResource.STATE_DISCONNECTED
== connector_pb.ConnectorResource.STATE_DISCONNECTED
)
Logger.i(
"instill model connector created, assert state == STATE_DISCONNECTED: True"
)

assert (
instill_connector.test()
== connector_interface.ConnectorResource.STATE_CONNECTED
)
assert instill_connector.test() == connector_pb.ConnectorResource.STATE_CONNECTED
Logger.i("instill model connector, assert state == STATE_CONNECTED: True")

config = {"destination_path": "/local/test-1"}
@@ -114,46 +120,36 @@
configuration=config,
)
assert (
csv_connector.get_state()
== connector_interface.ConnectorResource.STATE_DISCONNECTED
csv_connector.get_state() == connector_pb.ConnectorResource.STATE_DISCONNECTED
)
Logger.i("csv connector created, assert state == STATE_DISCONNECTED: True")

assert csv_connector.test() == connector_interface.ConnectorResource.STATE_CONNECTED
assert csv_connector.test() == connector_pb.ConnectorResource.STATE_CONNECTED
Logger.i("tested csv connector, assert state == STATE_CONNECTED: True")

# ======================== csv pipeline
# ================================================================================================================
# ================================================= csv pipeline =================================================
assert client.pipeline_service.is_serving()
Logger.i("pipeline client created, assert status == serving: True")

start_operator_component = pipeline_interface.Component()
start_operator_component.id = "start"
start_operator_component.definition_name = "operator-definitions/start-operator"
start_operator_component.configuration.update(
{"metadata": {"input": {"title": "Input", "type": "text"}}}
start_operator_component = create_start_operator(
config={"metadata": {"input": {"title": "Input", "type": "text"}}}
)
end_operator_component = pipeline_interface.Component()
end_operator_component.id = "end"
end_operator_component.definition_name = "operator-definitions/end-operator"
end_operator_component.configuration.update(
{

end_operator_component = create_end_operator(
config={
"metadata": {"answer": {"title": "Answer"}},
"input": {"answer": "{ d01.input }"},
}
)
csv_connector_component = pipeline_interface.Component()
csv_connector_component.id = "d01"
csv_connector_component.resource_name = f"{user.name}/connector-resources/csv"
csv_connector_component.definition_name = (
"connector-definitions/airbyte-destination-csv"

csv_connector_component = csv_connector.create_component(
name="d01", config={"input": {"text": "{ start.input }"}}
)
csv_connector_component.configuration.update({"input": {"text": "{ start.input }"}})

recipe = pipeline_interface.Recipe()
recipe.version = "v1alpha"
recipe.components.append(start_operator_component)
recipe.components.append(end_operator_component)
recipe.components.append(csv_connector_component)
recipe = create_recipe(
[start_operator_component, end_operator_component, csv_connector_component]
)

csv_pipeline = Pipeline(client=client, name="csv-pipeline", recipe=recipe)
csv_pipeline.validate_pipeline()
@@ -163,39 +159,31 @@
assert csv_pipeline([i])[0][0]["answer"]["text"] == "instill-ai rocks"
Logger.i("csv-pipeline triggered, output matched input: True")

# ======================== instill model pipeline
start_operator_component = pipeline_interface.Component()
start_operator_component.id = "start"
start_operator_component.definition_name = "operator-definitions/start-operator"
start_operator_component.configuration.update(
# =================================================================================================================
# ============================================= instill model pipeline ============================================
start_operator_component = create_start_operator(
{"metadata": {"input": {"title": "input", "type": "image"}}}
)
instill_model_connector_component = pipeline_interface.Component()
instill_model_connector_component.id = "yolov7"
instill_model_connector_component.resource_name = (
f"{user.name}/connector-resources/instill"
)
instill_model_connector_component.definition_name = (
instill_connector.get_definition().name

end_operator_component = create_end_operator(
config={
"input": {"output": "{ yolov7.output.objects }"},
"metadata": {"output": {}},
}
)
instill_model_connector_component.configuration.update(
{

instill_model_connector_component = instill_connector.create_component(
name="yolov7",
config={
"input": {
"task": "TASK_DETECTION",
"image_base64": "{ start.input }",
"model_name": "users/admin/models/yolov7",
},
}
)

end_operator_component = pipeline_interface.Component()
end_operator_component.id = "end"
end_operator_component.definition_name = "operator-definitions/end-operator"
end_operator_component.configuration.update(
{"input": {"output": "{ yolov7.output.objects }"}, "metadata": {"output": {}}}
},
)

recipe = pipeline_interface.Recipe()
recipe = pipeline_pb.Recipe()
recipe.version = "v1alpha"
recipe.components.append(start_operator_component)
recipe.components.append(instill_model_connector_component)