diff --git a/bin/specs b/bin/specs
index f2fe03e..bf90686 100755
--- a/bin/specs
+++ b/bin/specs
@@ -3,7 +3,7 @@
 echo "Downloading component/schema.json..."
 curl https://raw.githubusercontent.com/instill-ai/component/88cf79188084a6f4afd87f715e13f334d3674013/schema.json -L -s -o ./instill/resources/schema/jsons/component.json

-connector_schemas=("airbyte" "bigquery" "googlecloudstorage" "stabilityai" "googlesearch" "huggingface" "instill" "numbers" "openai" "pinecone" "redis" "restapi" "website")
+connector_schemas=("archetypeai" "airbyte" "bigquery" "googlecloudstorage" "stabilityai" "googlesearch" "huggingface" "instill" "numbers" "openai" "pinecone" "redis" "restapi" "website")

 for connector in ${connector_schemas[@]}; do
   echo "=====================@@@ Fetching and processing $connector @@@====================="
diff --git a/instill/clients/base.py b/instill/clients/base.py
index 1d4bd87..a9498f3 100644
--- a/instill/clients/base.py
+++ b/instill/clients/base.py
@@ -6,7 +6,7 @@
 class Client(ABC):
-    """Base interface class for creating mgmt/pipeline/connector/model clients.
+    """Base interface class for creating mgmt/pipeline/model clients.

     Args:
         ABC (abc.ABCMeta): std abstract class
diff --git a/instill/clients/mgmt.py b/instill/clients/mgmt.py
index 2d00b90..0c1b8e9 100644
--- a/instill/clients/mgmt.py
+++ b/instill/clients/mgmt.py
@@ -559,6 +559,25 @@ def get_user(
             metadata=self.hosts[self.instance].metadata,
         ).send_sync()

+    @grpc_handler
+    def get_remaining_credit(
+        self,
+        name: str,
+        async_enabled: bool = False,
+    ) -> mgmt_interface.GetRemainingCreditResponse:
+        if async_enabled:
+            return RequestFactory(
+                method=self.hosts[self.instance].async_client.GetRemainingCredit,
+                request=mgmt_interface.GetRemainingCreditRequest(owner=f"users/{name}"),
+                metadata=self.hosts[self.instance].metadata,
+            ).send_async()
+
+        return RequestFactory(
+            method=self.hosts[self.instance].client.GetRemainingCredit,
+            request=mgmt_interface.GetRemainingCreditRequest(owner=f"users/{name}"),
+            metadata=self.hosts[self.instance].metadata,
+        ).send_sync()
+
     @grpc_handler
     def get_org(
         self,
@@ -637,63 +656,3 @@ def list_pipeline_trigger_chart_records(
             request=metric_interface.ListPipelineTriggerChartRecordsRequest(),
             metadata=self.hosts[self.instance].metadata,
         ).send_sync()
-
-    @grpc_handler
-    def list_connector_execute_records(
-        self,
-        async_enabled: bool = False,
-    ) -> metric_interface.ListConnectorExecuteRecordsResponse:
-        if async_enabled:
-            return RequestFactory(
-                method=self.hosts[
-                    self.instance
-                ].async_client.ListConnectorExecuteRecords,
-                request=metric_interface.ListConnectorExecuteRecordsRequest(),
-                metadata=self.hosts[self.instance].metadata,
-            ).send_async()
-
-        return RequestFactory(
-            method=self.hosts[self.instance].client.ListConnectorExecuteRecords,
-            request=metric_interface.ListConnectorExecuteRecordsRequest(),
-            metadata=self.hosts[self.instance].metadata,
-        ).send_sync()
-
-    @grpc_handler
-    def list_connector_execute_table_records(
-        self,
-        async_enabled: bool = False,
-    ) -> metric_interface.ListConnectorExecuteTableRecordsResponse:
-        if async_enabled:
-            return RequestFactory(
-                method=self.hosts[
-                    self.instance
-                ].async_client.ListConnectorExecuteTableRecords,
-                request=metric_interface.ListConnectorExecuteTableRecordsRequest(),
-                metadata=self.hosts[self.instance].metadata,
-            ).send_async()
-
-        return RequestFactory(
-            method=self.hosts[self.instance].client.ListConnectorExecuteTableRecords,
-            request=metric_interface.ListConnectorExecuteTableRecordsRequest(),
-            metadata=self.hosts[self.instance].metadata,
-        ).send_sync()
-
-    @grpc_handler
-    def list_connector_execute_chart_records(
-        self,
-        async_enabled: bool = False,
-    ) -> metric_interface.ListConnectorExecuteChartRecordsResponse:
-        if async_enabled:
-            return RequestFactory(
-                method=self.hosts[
-                    self.instance
-                ].async_client.ListConnectorExecuteChartRecords,
-                request=metric_interface.ListConnectorExecuteChartRecordsRequest(),
-                metadata=self.hosts[self.instance].metadata,
-            ).send_async()
-
-        return RequestFactory(
-            method=self.hosts[self.instance].client.ListConnectorExecuteChartRecords,
-            request=metric_interface.ListConnectorExecuteChartRecordsRequest(),
-            metadata=self.hosts[self.instance].metadata,
-        ).send_sync()
diff --git a/instill/clients/pipeline.py b/instill/clients/pipeline.py
index ad80a1a..1e81851 100644
--- a/instill/clients/pipeline.py
+++ b/instill/clients/pipeline.py
@@ -7,10 +7,9 @@
 import instill.protogen.common.healthcheck.v1beta.healthcheck_pb2 as healthcheck

 # pipeline
-import instill.protogen.vdp.pipeline.v1beta.connector_pb2 as connector_interface
-import instill.protogen.vdp.pipeline.v1beta.operator_definition_pb2 as operator_interface
 import instill.protogen.vdp.pipeline.v1beta.pipeline_pb2 as pipeline_interface
 import instill.protogen.vdp.pipeline.v1beta.pipeline_public_service_pb2_grpc as pipeline_service
+import instill.protogen.vdp.pipeline.v1beta.secret_pb2 as secret_interface
 from instill.clients.base import Client, RequestFactory
 from instill.clients.constant import DEFAULT_INSTANCE
 from instill.clients.instance import InstillInstance
@@ -111,60 +110,6 @@ def is_serving(self) -> bool:
         except Exception:
             return False

-    @grpc_handler
-    def list_operator_definitions(
-        self,
-        filter_str: str = "",
-        next_page_token: str = "",
-        total_size: int = 100,
-        async_enabled: bool = False,
-    ) -> operator_interface.ListOperatorDefinitionsResponse:
-        if async_enabled:
-            return RequestFactory(
-                method=self.hosts[self.instance].async_client.ListOperatorDefinitions,
-                request=operator_interface.ListOperatorDefinitionsRequest(
-                    filter=filter_str,
-                    page_size=total_size,
-                    page_token=next_page_token,
-                    view=operator_interface.OperatorDefinition.VIEW_FULL,
-                ),
-                metadata=self.hosts[self.instance].metadata,
-            ).send_async()
-
-        return RequestFactory(
-            method=self.hosts[self.instance].client.ListOperatorDefinitions,
-            request=operator_interface.ListOperatorDefinitionsRequest(
-                filter=filter_str,
-                page_size=total_size,
-                page_token=next_page_token,
-                view=operator_interface.OperatorDefinition.VIEW_FULL,
-            ),
-            metadata=self.hosts[self.instance].metadata,
-        ).send_sync()
-
-    @grpc_handler
-    def get_operator_definition(
-        self, name: str, async_enabled: bool = False
-    ) -> operator_interface.GetOperatorDefinitionResponse:
-        if async_enabled:
-            return RequestFactory(
-                method=self.hosts[self.instance].async_client.GetOperatorDefinition,
-                request=operator_interface.GetOperatorDefinitionRequest(
-                    name=f"operator-definitions/{name}",
-                    view=operator_interface.OperatorDefinition.VIEW_FULL,
-                ),
-                metadata=self.hosts[self.instance].metadata,
-            ).send_async()
-
-        return RequestFactory(
-            method=self.hosts[self.instance].client.GetOperatorDefinition,
-            request=operator_interface.GetOperatorDefinitionRequest(
-                name=f"operator-definitions/{name}",
-                view=operator_interface.OperatorDefinition.VIEW_FULL,
-            ),
-            metadata=self.hosts[self.instance].metadata,
-        ).send_sync()
-
     @grpc_handler
     def create_pipeline(
         self,
@@ -318,6 +263,32 @@ def validate_pipeline(
metadata=self.hosts[self.instance].metadata, ).send_sync() + @grpc_handler + def clone_pipeline( + self, + name: str, + target: str, + async_enabled: bool = False, + ) -> pipeline_interface.CloneUserPipelineResponse: + if async_enabled: + return RequestFactory( + method=self.hosts[self.instance].async_client.CloneUserPipeline, + request=pipeline_interface.CloneUserPipelineRequest( + name=f"{self.target_namespace}/pipelines/{name}", + target=f"{self.target_namespace}/pipelines/{target}", + ), + metadata=self.hosts[self.instance].metadata, + ).send_async() + + return RequestFactory( + method=self.hosts[self.instance].client.CloneUserPipeline, + request=pipeline_interface.CloneUserPipelineRequest( + name=f"{self.target_namespace}/pipelines/{name}", + target=f"{self.target_namespace}/pipelines/{target}", + ), + metadata=self.hosts[self.instance].metadata, + ).send_sync() + @grpc_handler def trigger_pipeline( self, @@ -691,29 +662,6 @@ def restore_pipeline_release( metadata=self.hosts[self.instance].metadata, ).send_sync() - @grpc_handler - def watch_pipeline_release( - self, - name: str, - async_enabled: bool = False, - ) -> pipeline_interface.WatchUserPipelineReleaseResponse: - if async_enabled: - return RequestFactory( - method=self.hosts[self.instance].async_client.WatchUserPipelineRelease, - request=pipeline_interface.WatchUserPipelineReleaseRequest( - name=f"{self.target_namespace}/pipelines/{name}", - ), - metadata=self.hosts[self.instance].metadata, - ).send_async() - - return RequestFactory( - method=self.hosts[self.instance].client.WatchUserPipelineRelease, - request=pipeline_interface.WatchUserPipelineReleaseRequest( - name=f"{self.target_namespace}/pipelines/{name}", - ), - metadata=self.hosts[self.instance].metadata, - ).send_sync() - @grpc_handler def trigger_pipeline_release( self, @@ -766,6 +714,134 @@ def trigger_async_pipeline_release( metadata=self.hosts[self.instance].metadata, ).send_sync() + @grpc_handler + def create_secret( + self, + name: str, + value: str, + async_enabled: bool = False, + ) -> secret_interface.CreateUserSecretResponse: + secret = secret_interface.Secret(id=name, value=value) + if async_enabled: + return RequestFactory( + method=self.hosts[self.instance].async_client.CreateUserSecret, + request=secret_interface.CreateUserSecretRequest( + secret=secret, + parent=self.target_namespace, + ), + metadata=self.hosts[self.instance].metadata, + ).send_async() + + return RequestFactory( + method=self.hosts[self.instance].client.CreateUserSecret, + request=secret_interface.CreateUserSecretRequest( + secret=secret, + parent=self.target_namespace, + ), + metadata=self.hosts[self.instance].metadata, + ).send_sync() + + @grpc_handler + def get_secret( + self, + name: str, + async_enabled: bool = False, + ) -> secret_interface.GetUserSecretResponse: + if async_enabled: + return RequestFactory( + method=self.hosts[self.instance].async_client.GetUserSecret, + request=secret_interface.GetUserSecretRequest( + name=f"{self.target_namespace}/secrets/{name}", + ), + metadata=self.hosts[self.instance].metadata, + ).send_async() + + return RequestFactory( + method=self.hosts[self.instance].client.GetUserSecret, + request=secret_interface.GetUserSecretRequest( + name=f"{self.target_namespace}/secrets/{name}", + ), + metadata=self.hosts[self.instance].metadata, + ).send_sync() + + @grpc_handler + def update_secrets( + self, + secret: secret_interface.Secret, + mask: field_mask_pb2.FieldMask, + async_enabled: bool = False, + ) -> secret_interface.UpdateUserSecretResponse: + if 
async_enabled: + return RequestFactory( + method=self.hosts[self.instance].async_client.UpdateUserSecret, + request=secret_interface.UpdateUserSecretRequest( + secret=secret, + update_mask=mask, + ), + metadata=self.hosts[self.instance].metadata, + ).send_async() + + return RequestFactory( + method=self.hosts[self.instance].client.UpdateUserSecret, + request=secret_interface.UpdateUserSecretRequest( + secret=secret, + update_mask=mask, + ), + metadata=self.hosts[self.instance].metadata, + ).send_sync() + + @grpc_handler + def delete_secret( + self, + name: str, + async_enabled: bool = False, + ) -> secret_interface.DeleteUserSecretResponse: + if async_enabled: + return RequestFactory( + method=self.hosts[self.instance].async_client.DeleteUserSecret, + request=secret_interface.DeleteUserSecretRequest( + name=f"{self.target_namespace}/secrets/{name}", + ), + metadata=self.hosts[self.instance].metadata, + ).send_async() + + return RequestFactory( + method=self.hosts[self.instance].client.DeleteUserSecret, + request=secret_interface.DeleteUserSecretRequest( + name=f"{self.target_namespace}/secrets/{name}", + ), + metadata=self.hosts[self.instance].metadata, + ).send_sync() + + @grpc_handler + def list_secrets( + self, + next_page_token: str = "", + total_size: int = 100, + async_enabled: bool = False, + ) -> secret_interface.ListUserSecretsResponse: + if async_enabled: + return RequestFactory( + method=self.hosts[self.instance].async_client.ListUserSecrets, + request=secret_interface.ListUserSecretsRequest( + parent=self.target_namespace, + page_size=total_size, + page_token=next_page_token, + ), + metadata=self.hosts[self.instance].metadata, + ).send_async() + return RequestFactory( + method=self.hosts[self.instance].client.ListUserSecrets, + request=secret_interface.ListUserSecretsRequest( + parent=self.target_namespace, + page_size=total_size, + page_token=next_page_token, + ), + metadata=self.hosts[self.instance].metadata, + ).send_sync() + + ######## organization endpoints + @grpc_handler def create_org_pipeline( self, @@ -880,7 +956,7 @@ def validate_org_pipeline( self, name: str, async_enabled: bool = False, - ) -> pipeline_interface.ValidateUserPipelineResponse: + ) -> pipeline_interface.ValidateOrganizationPipelineResponse: if async_enabled: return RequestFactory( method=self.hosts[ @@ -900,6 +976,32 @@ def validate_org_pipeline( metadata=self.hosts[self.instance].metadata, ).send_sync() + @grpc_handler + def clone_org_pipeline( + self, + name: str, + target: str, + async_enabled: bool = False, + ) -> pipeline_interface.CloneOrganizationPipelineResponse: + if async_enabled: + return RequestFactory( + method=self.hosts[self.instance].async_client.CloneOrganizationPipeline, + request=pipeline_interface.CloneOrganizationPipelineRequest( + name=f"{self.target_namespace}/pipelines/{name}", + target=f"{self.target_namespace}/pipelines/{target}", + ), + metadata=self.hosts[self.instance].metadata, + ).send_async() + + return RequestFactory( + method=self.hosts[self.instance].client.CloneOrganizationPipeline, + request=pipeline_interface.CloneOrganizationPipelineRequest( + name=f"{self.target_namespace}/pipelines/{name}", + target=f"{self.target_namespace}/pipelines/{target}", + ), + metadata=self.hosts[self.instance].metadata, + ).send_sync() + @grpc_handler def trigger_org_pipeline( self, @@ -1236,31 +1338,6 @@ def restore_org_pipeline_release( metadata=self.hosts[self.instance].metadata, ).send_sync() - @grpc_handler - def watch_org_pipeline_release( - self, - name: str, - async_enabled: 
bool = False, - ) -> pipeline_interface.WatchOrganizationPipelineReleaseResponse: - if async_enabled: - return RequestFactory( - method=self.hosts[ - self.instance - ].async_client.WatchOrganizationPipelineRelease, - request=pipeline_interface.WatchOrganizationPipelineReleaseRequest( - name=f"{self.target_namespace}/pipelines/{name}", - ), - metadata=self.hosts[self.instance].metadata, - ).send_async() - - return RequestFactory( - method=self.hosts[self.instance].client.WatchOrganizationPipelineRelease, - request=pipeline_interface.WatchOrganizationPipelineReleaseRequest( - name=f"{self.target_namespace}/pipelines/{name}", - ), - metadata=self.hosts[self.instance].metadata, - ).send_sync() - @grpc_handler def trigger_org_pipeline_release( self, @@ -1316,404 +1393,127 @@ def trigger_async_org_pipeline_release( ).send_sync() @grpc_handler - def create_connector( + def create_org_secret( self, name: str, - definition: str, - configuration: dict, + value: str, async_enabled: bool = False, - ) -> connector_interface.CreateUserConnectorResponse: - connector = connector_interface.Connector() - connector.id = name - connector.connector_definition_name = definition - connector.configuration.update(configuration) + ) -> secret_interface.CreateOrganizationSecretResponse: + secret = secret_interface.Secret(id=name, value=value) if async_enabled: return RequestFactory( - method=self.hosts[self.instance].async_client.CreateUserConnector, - request=connector_interface.CreateUserConnectorRequest( - connector=connector, parent=self.target_namespace - ), - metadata=self.hosts[self.instance].metadata, - ).send_async() - - return RequestFactory( - method=self.hosts[self.instance].client.CreateUserConnector, - request=connector_interface.CreateUserConnectorRequest( - connector=connector, parent=self.target_namespace - ), - metadata=self.hosts[self.instance].metadata, - ).send_sync() - - @grpc_handler - def get_connector( - self, - name: str, - async_enabled: bool = False, - ) -> connector_interface.GetUserConnectorResponse: - if async_enabled: - return RequestFactory( - method=self.hosts[self.instance].async_client.GetUserConnector, - request=connector_interface.GetUserConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}", - view=connector_interface.Connector.VIEW_FULL, - ), - metadata=self.hosts[self.instance].metadata, - ).send_async() - - return RequestFactory( - method=self.hosts[self.instance].client.GetUserConnector, - request=connector_interface.GetUserConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}", - view=connector_interface.Connector.VIEW_FULL, - ), - metadata=self.hosts[self.instance].metadata, - ).send_sync() - - @grpc_handler - def test_connector( - self, - name: str, - async_enabled: bool = False, - ) -> connector_interface.TestUserConnectorResponse: - if async_enabled: - return RequestFactory( - method=self.hosts[self.instance].async_client.TestUserConnector, - request=connector_interface.TestUserConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}" - ), - metadata=self.hosts[self.instance].metadata, - ).send_async() - - return RequestFactory( - method=self.hosts[self.instance].client.TestUserConnector, - request=connector_interface.TestUserConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}" - ), - metadata=self.hosts[self.instance].metadata, - ).send_sync() - - @grpc_handler - def execute_connector( - self, - name: str, - inputs: list, - async_enabled: bool = False, - ) -> connector_interface.ExecuteUserConnectorResponse: - 
if async_enabled: - return RequestFactory( - method=self.hosts[self.instance].async_client.ExecuteUserConnector, - request=connector_interface.ExecuteUserConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}", inputs=inputs - ), - metadata=self.hosts[self.instance].metadata, - ).send_async() - - return RequestFactory( - method=self.hosts[self.instance].client.ExecuteUserConnector, - request=connector_interface.ExecuteUserConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}", inputs=inputs - ), - metadata=self.hosts[self.instance].metadata, - ).send_sync() - - @grpc_handler - def watch_connector( - self, - name: str, - async_enabled: bool = False, - ) -> connector_interface.WatchUserConnectorResponse: - if async_enabled: - return RequestFactory( - method=self.hosts[self.instance].async_client.WatchUserConnector, - request=connector_interface.WatchUserConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}" - ), - metadata=self.hosts[self.instance].metadata, - ).send_async() - - return RequestFactory( - method=self.hosts[self.instance].client.WatchUserConnector, - request=connector_interface.WatchUserConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}" - ), - metadata=self.hosts[self.instance].metadata, - ).send_sync() - - @grpc_handler - def delete_connector( - self, - name: str, - async_enabled: bool = False, - ) -> connector_interface.DeleteUserConnectorResponse: - if async_enabled: - return RequestFactory( - method=self.hosts[self.instance].async_client.DeleteUserConnector, - request=connector_interface.DeleteUserConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}" - ), - metadata=self.hosts[self.instance].metadata, - ).send_async() - - return RequestFactory( - method=self.hosts[self.instance].client.DeleteUserConnector, - request=connector_interface.DeleteUserConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}" - ), - metadata=self.hosts[self.instance].metadata, - ).send_sync() - - @grpc_handler - def list_connectors( - self, - filer_str: str = "", - next_page_token: str = "", - total_size: int = 100, - show_deleted: bool = False, - public=False, - async_enabled: bool = False, - ) -> connector_interface.ListUserConnectorsResponse: - if async_enabled: - if public: - method = self.hosts[self.instance].async_client.ListConnectors - return RequestFactory( - method=method, - request=connector_interface.ListConnectorsRequest( - filter=filer_str, - page_size=total_size, - page_token=next_page_token, - show_deleted=show_deleted, - view=connector_interface.Connector.VIEW_FULL, - ), - metadata=self.hosts[self.instance].metadata, - ).send_async() - method = self.hosts[self.instance].async_client.ListUserConnectors - return RequestFactory( - method=method, - request=connector_interface.ListUserConnectorsRequest( + method=self.hosts[self.instance].async_client.CreateOrganizationSecret, + request=secret_interface.CreateUserSecretRequest( + secret=secret, parent=self.target_namespace, - filter=filer_str, - page_size=total_size, - page_token=next_page_token, - show_deleted=show_deleted, - view=connector_interface.Connector.VIEW_FULL, - ), - metadata=self.hosts[self.instance].metadata, - ).send_async() - if public: - method = self.hosts[self.instance].client.ListConnectors - return RequestFactory( - method=method, - request=connector_interface.ListConnectorsRequest( - filter=filer_str, - page_size=total_size, - page_token=next_page_token, - show_deleted=show_deleted, - 
view=connector_interface.Connector.VIEW_FULL, - ), - metadata=self.hosts[self.instance].metadata, - ).send_sync() - method = self.hosts[self.instance].client.ListUserConnectors - return RequestFactory( - method=method, - request=connector_interface.ListUserConnectorsRequest( - parent=self.target_namespace, - filter=filer_str, - page_size=total_size, - page_token=next_page_token, - show_deleted=show_deleted, - view=connector_interface.Connector.VIEW_FULL, - ), - metadata=self.hosts[self.instance].metadata, - ).send_sync() - - @grpc_handler - def create_org_connector( - self, - name: str, - definition: str, - configuration: dict, - async_enabled: bool = False, - ) -> connector_interface.CreateOrganizationConnectorResponse: - connector = connector_interface.Connector() - connector.id = name - connector.connector_definition_name = definition - connector.configuration.update(configuration) - if async_enabled: - return RequestFactory( - method=self.hosts[ - self.instance - ].async_client.CreateOrganizationConnector, - request=connector_interface.CreateOrganizationConnectorRequest( - connector=connector, parent=self.target_namespace ), metadata=self.hosts[self.instance].metadata, ).send_async() return RequestFactory( - method=self.hosts[self.instance].client.CreateOrganizationConnector, - request=connector_interface.CreateOrganizationConnectorRequest( - connector=connector, parent=self.target_namespace - ), - metadata=self.hosts[self.instance].metadata, - ).send_sync() - - @grpc_handler - def get_org_connector( - self, - name: str, - async_enabled: bool = False, - ) -> connector_interface.GetOrganizationConnectorResponse: - if async_enabled: - return RequestFactory( - method=self.hosts[self.instance].async_client.GetOrganizationConnector, - request=connector_interface.GetOrganizationConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}", - view=connector_interface.Connector.VIEW_FULL, - ), - metadata=self.hosts[self.instance].metadata, - ).send_async() - - return RequestFactory( - method=self.hosts[self.instance].client.GetOrganizationConnector, - request=connector_interface.GetOrganizationConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}", - view=connector_interface.Connector.VIEW_FULL, - ), - metadata=self.hosts[self.instance].metadata, - ).send_sync() - - @grpc_handler - def test_org_connector( - self, - name: str, - async_enabled: bool = False, - ) -> connector_interface.TestOrganizationConnectorResponse: - if async_enabled: - return RequestFactory( - method=self.hosts[self.instance].async_client.TestOrganizationConnector, - request=connector_interface.TestOrganizationConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}" - ), - metadata=self.hosts[self.instance].metadata, - ).send_async() - - return RequestFactory( - method=self.hosts[self.instance].client.TestOrganizationConnector, - request=connector_interface.TestOrganizationConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}" + method=self.hosts[self.instance].client.CreateOrganizationSecret, + request=secret_interface.CreateUserSecretRequest( + secret=secret, + parent=self.target_namespace, ), metadata=self.hosts[self.instance].metadata, ).send_sync() @grpc_handler - def execute_org_connector( + def get_org_secret( self, name: str, - inputs: list, async_enabled: bool = False, - ) -> connector_interface.ExecuteOrganizationConnectorResponse: + ) -> secret_interface.GetOrganizationSecretResponse: if async_enabled: return RequestFactory( - method=self.hosts[ - self.instance - 
].async_client.ExecuteOrganizationConnector, - request=connector_interface.ExecuteOrganizationConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}", inputs=inputs + method=self.hosts[self.instance].async_client.GetOrganizationSecret, + request=secret_interface.GetOrganizationSecretRequest( + name=f"{self.target_namespace}/secrets/{name}", ), metadata=self.hosts[self.instance].metadata, ).send_async() return RequestFactory( - method=self.hosts[self.instance].client.ExecuteOrganizationConnector, - request=connector_interface.ExecuteOrganizationConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}", inputs=inputs + method=self.hosts[self.instance].client.GetOrganizationSecret, + request=secret_interface.GetOrganizationSecretRequest( + name=f"{self.target_namespace}/secrets/{name}", ), metadata=self.hosts[self.instance].metadata, ).send_sync() @grpc_handler - def watch_org_connector( + def update_org_secrets( self, - name: str, + secret: secret_interface.Secret, + mask: field_mask_pb2.FieldMask, async_enabled: bool = False, - ) -> connector_interface.WatchOrganizationConnectorResponse: + ) -> secret_interface.UpdateOrganizationSecretResponse: if async_enabled: return RequestFactory( - method=self.hosts[ - self.instance - ].async_client.WatchOrganizationConnector, - request=connector_interface.WatchOrganizationConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}" + method=self.hosts[self.instance].async_client.UpdateOrganizationSecret, + request=secret_interface.UpdateOrganizationSecretRequest( + secret=secret, + update_mask=mask, ), metadata=self.hosts[self.instance].metadata, ).send_async() return RequestFactory( - method=self.hosts[self.instance].client.WatchOrganizationConnector, - request=connector_interface.WatchOrganizationConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}" + method=self.hosts[self.instance].client.UpdateOrganizationSecret, + request=secret_interface.UpdateOrganizationSecretRequest( + secret=secret, + update_mask=mask, ), metadata=self.hosts[self.instance].metadata, ).send_sync() @grpc_handler - def delete_org_connector( + def delete_org_secret( self, name: str, async_enabled: bool = False, - ) -> connector_interface.DeleteOrganizationConnectorResponse: + ) -> secret_interface.DeleteOrganizationSecretResponse: if async_enabled: return RequestFactory( - method=self.hosts[ - self.instance - ].async_client.DeleteOrganizationConnector, - request=connector_interface.DeleteOrganizationConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}" + method=self.hosts[self.instance].async_client.DeleteOrganizationSecret, + request=secret_interface.DeleteOrganizationSecretRequest( + name=f"{self.target_namespace}/secrets/{name}", ), metadata=self.hosts[self.instance].metadata, ).send_async() return RequestFactory( - method=self.hosts[self.instance].client.DeleteOrganizationConnector, - request=connector_interface.DeleteOrganizationConnectorRequest( - name=f"{self.target_namespace}/connectors/{name}" + method=self.hosts[self.instance].client.DeleteOrganizationSecret, + request=secret_interface.DeleteOrganizationSecretRequest( + name=f"{self.target_namespace}/secrets/{name}", ), metadata=self.hosts[self.instance].metadata, ).send_sync() @grpc_handler - def list_org_connectors( + def list_org_secrets( self, - filer_str: str = "", next_page_token: str = "", total_size: int = 100, - show_deleted: bool = False, async_enabled: bool = False, - ) -> connector_interface.ListOrganizationConnectorsResponse: + ) -> 
secret_interface.ListOrganizationSecretsResponse: if async_enabled: return RequestFactory( - method=self.hosts[ - self.instance - ].async_client.ListOrganizationConnectors, - request=connector_interface.ListOrganizationConnectorsRequest( + method=self.hosts[self.instance].async_client.ListOrganizationSecrets, + request=secret_interface.ListOrganizationSecretsRequest( parent=self.target_namespace, - filter=filer_str, page_size=total_size, page_token=next_page_token, - show_deleted=show_deleted, - view=connector_interface.Connector.VIEW_FULL, ), metadata=self.hosts[self.instance].metadata, ).send_async() return RequestFactory( - method=self.hosts[self.instance].client.ListOrganizationConnectors, - request=connector_interface.ListOrganizationConnectorsRequest( + method=self.hosts[self.instance].client.ListOrganizationSecrets, + request=secret_interface.ListOrganizationSecretsRequest( parent=self.target_namespace, - filter=filer_str, page_size=total_size, page_token=next_page_token, - show_deleted=show_deleted, - view=connector_interface.Connector.VIEW_FULL, ), metadata=self.hosts[self.instance].metadata, ).send_sync() diff --git a/instill/protogen b/instill/protogen index f870fda..116f0ab 160000 --- a/instill/protogen +++ b/instill/protogen @@ -1 +1 @@ -Subproject commit f870fda57b51e79706d350dba9a0f387233d4d12 +Subproject commit 116f0abcfc6a9599750ebdda7dd5acea4098a541 diff --git a/instill/resources/__init__.py b/instill/resources/__init__.py index 8e45f3f..dfcaa9f 100644 --- a/instill/resources/__init__.py +++ b/instill/resources/__init__.py @@ -6,18 +6,30 @@ import instill.protogen.model.model.v1alpha.task_semantic_segmentation_pb2 as task_semantic_segmentation import instill.protogen.model.model.v1alpha.task_text_generation_pb2 as task_text_generation import instill.protogen.model.model.v1alpha.task_text_to_image_pb2 as task_text_to_image -import instill.protogen.vdp.pipeline.v1beta.connector_pb2 as connector_pb import instill.protogen.vdp.pipeline.v1beta.pipeline_pb2 as pipeline_pb -from instill.resources.connector import Connector +from instill.resources.component import Component from instill.resources.connector_ai import ( + HuggingfaceConnector, InstillModelConnector, OpenAIConnector, StabilityAIConnector, ) from instill.resources.connector_blockchain import NumbersConnector -from instill.resources.connector_data import PineconeConnector +from instill.resources.connector_data import ( + BigQueryConnector, + GoogleCloudStorageConnector, + GoogleSearchConnector, + PineconeConnector, + RedisConnector, + WebsiteConnector, +) from instill.resources.model import GithubModel, HugginfaceModel, Model -from instill.resources.operator import create_end_operator, create_start_operator +from instill.resources.operator import ( + Base64Operator, + ImageOperator, + JSONOperator, + TextOperator, +) from instill.resources.pipeline import Pipeline from instill.resources.recipe import create_recipe from instill.resources.schema.helper import populate_default_value diff --git a/instill/resources/component.py b/instill/resources/component.py new file mode 100644 index 0000000..bb80d35 --- /dev/null +++ b/instill/resources/component.py @@ -0,0 +1,45 @@ +# pylint: disable=no-member,wrong-import-position,no-name-in-module +from typing import Union + +from instill.protogen.vdp.pipeline.v1beta.pipeline_pb2 import Component as Comp +from instill.protogen.vdp.pipeline.v1beta.pipeline_pb2 import ( + ConnectorComponent, + IteratorComponent, + OperatorComponent, +) +from instill.resources.errors import 
ComponentTypeExection + + +class Component: + def __init__( + self, + name: str, + component: Union[ + ConnectorComponent, + OperatorComponent, + IteratorComponent, + ], + ): + + if isinstance(component, ConnectorComponent): + c = Comp( + id=name, + connector_component=component, + ) + elif isinstance(component, OperatorComponent): + c = Comp( + id=name, + operator_component=component, + ) + elif isinstance(component, IteratorComponent): + c = Comp( + id=name, + iterator_component=component, + ) + else: + raise ComponentTypeExection + + self.c = c + + def get_component(self) -> Comp: + return self.c diff --git a/instill/resources/connector.py b/instill/resources/connector.py deleted file mode 100644 index 0a224a2..0000000 --- a/instill/resources/connector.py +++ /dev/null @@ -1,80 +0,0 @@ -# pylint: disable=no-member,wrong-import-position,no-name-in-module -import instill.protogen.vdp.pipeline.v1beta.connector_definition_pb2 as connector_definition_interface -import instill.protogen.vdp.pipeline.v1beta.connector_pb2 as connector_interface -import instill.protogen.vdp.pipeline.v1beta.pipeline_pb2 as pipeline_interface -from instill.clients import InstillClient -from instill.resources.resource import Resource - - -class Connector(Resource): - def __init__( - self, - client: InstillClient, - name: str, - definition: str, - configuration: dict, - ) -> None: - super().__init__() - self.client = client - get_resp = client.pipeline_service.get_connector(name=name, silent=True) - if get_resp is None: - connector = client.pipeline_service.create_connector( - name=name, - definition=definition, - configuration=configuration, - ).connector - if connector is None: - raise BaseException("connector creation failed") - else: - connector = get_resp.connector - - self.resource = connector - - def __call__(self, task_inputs: list, mode="execute"): - if mode == "execute": - resp = self.client.pipeline_service.execute_connector( - self.resource.id, task_inputs - ) - return resp.outputs - return self.test() - - @property - def client(self): - return self._client - - @client.setter - def client(self, client: InstillClient): - self._client = client - - @property - def resource(self): - return self._resource - - @resource.setter - def resource(self, resource: connector_interface.Connector): - self._resource = resource - - def _create_component( - self, - name: str, - config: dict, - ) -> pipeline_interface.Component: - component = pipeline_interface.Component() - component.id = name - component.definition_name = self.get_definition().name - component.resource_name = self.resource.name - component.configuration.update(config) - return component - - def get_definition(self) -> connector_definition_interface.ConnectorDefinition: - return self.resource.connector_definition - - def get_state(self) -> connector_interface.Connector.State: - return self.client.pipeline_service.watch_connector(self.resource.id).state - - def test(self) -> connector_interface.Connector.State: - return self.client.pipeline_service.test_connector(self.resource.id).state - - def delete(self): - if self.resource is not None: - self.client.pipeline_service.delete_connector(self.resource.id) diff --git a/instill/resources/connector_ai.py b/instill/resources/connector_ai.py index 6d28ffb..8730e7b 100644 --- a/instill/resources/connector_ai.py +++ b/instill/resources/connector_ai.py @@ -1,14 +1,11 @@ # pylint: disable=no-member,wrong-import-position,no-name-in-module,arguments-renamed -import json from typing import Union -import jsonschema - 
-from instill.clients import InstillClient -from instill.protogen.vdp.pipeline.v1beta.pipeline_pb2 import Component -from instill.resources import const -from instill.resources.connector import Connector +from instill.resources import Component from instill.resources.schema import ( + archetypeai_task_describe_input, + archetypeai_task_summarize_input, + archetypeai_task_upload_file_input, helper, huggingface_task_audio_classification_input, huggingface_task_conversational_input, @@ -46,35 +43,12 @@ stabilityai_task_image_to_image_input, stabilityai_task_text_to_image_input, ) -from instill.resources.schema.huggingface import HuggingFaceConnectorSpec -from instill.resources.schema.instill import ( - InstillModelConnector as InstillModelConnectorConfig, -) -from instill.resources.schema.openai import OpenAIConnectorResource -from instill.resources.schema.stabilityai import StabilityAIConnectorResource -class HuggingfaceConnector(Connector): +class HuggingfaceConnector(Component): """Huggingface Connector""" - with open( - f"{const.SPEC_PATH}/huggingface_definitions.json", "r", encoding="utf8" - ) as f: - definitions_jsonschema = json.loads(f.read()) - def __init__( - self, - client: InstillClient, - name: str, - config: HuggingFaceConnectorSpec, - ) -> None: - definition = "connector-definitions/hugging-face" - - config_dict = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_dict, StabilityAIConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_dict) - - def create_component( self, name: str, inp: Union[ @@ -97,30 +71,21 @@ def create_component( huggingface_task_zero_shot_classification_input.Input, huggingface_task_token_classification_input.Input, ], - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - + ) -> None: + definition_name = "connector-definitions/hugging-face" + component_type = "connector" -class InstillModelConnector(Connector): - """Instill Model Connector""" + component = helper.construct_component_config( + component_type, definition_name, inp + ) - with open(f"{const.SPEC_PATH}/instill_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) + super().__init__(name, component) - def __init__( - self, - client: InstillClient, - config: InstillModelConnectorConfig, - name: str = "model-connector", - ) -> None: - definition = "connector-definitions/instill-model" - config_dict = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_dict, InstillModelConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_dict) +class InstillModelConnector(Component): + """Instill Model Connector""" - def create_component( + def __init__( self, name: str, inp: Union[ @@ -135,62 +100,42 @@ def create_component( instill_task_text_to_image_input.Input, instill_task_visual_question_answering_input.Input, ], - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - + ) -> None: + definition_name = "connector-definitions/instill-model" + component_type = "connector" -class StabilityAIConnector(Connector): - """Stability AI Connector""" + component = helper.construct_component_config( + component_type, definition_name, inp + ) - with open( - f"{const.SPEC_PATH}/stabilityai_definitions.json", "r", encoding="utf8" - ) as f: - definitions_jsonschema = json.loads(f.read()) + super().__init__(name, component) - def __init__( - self, - client: 
InstillClient, - name: str, - config: StabilityAIConnectorResource, - ) -> None: - definition = "connector-definitions/stability-ai" - config_dict = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_dict, StabilityAIConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_dict) +class StabilityAIConnector(Component): + """Stability AI Connector""" - def create_component( + def __init__( self, name: str, inp: Union[ stabilityai_task_image_to_image_input.Input, stabilityai_task_text_to_image_input.Input, ], - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - + ) -> None: + definition_name = "connector-definitions/stability-ai" + component_type = "connector" -class OpenAIConnector(Connector): - """OpenAI Connector""" + component = helper.construct_component_config( + component_type, definition_name, inp + ) - with open(f"{const.SPEC_PATH}/openai_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) + super().__init__(name, component) - def __init__( - self, - client: InstillClient, - name: str, - config: OpenAIConnectorResource, - ) -> None: - definition = "connector-definitions/openai" - config_dict = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_dict, OpenAIConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_dict) +class OpenAIConnector(Component): + """OpenAI Connector""" - def create_component( + def __init__( self, name: str, inp: Union[ @@ -200,6 +145,34 @@ def create_component( openai_task_text_generation_input.Input, openai_task_text_to_speech_input.Input, ], - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) + ) -> None: + definition_name = "connector-definitions/openai" + component_type = "connector" + + component = helper.construct_component_config( + component_type, definition_name, inp + ) + + super().__init__(name, component) + + +class ArchetypeAIConnector(Component): + """ArchetypeAI Connector""" + + def __init__( + self, + name: str, + inp: Union[ + archetypeai_task_upload_file_input.Input, + archetypeai_task_describe_input.Input, + archetypeai_task_summarize_input.Input, + ], + ) -> None: + definition_name = "connector-definitions/archetype-ai" + component_type = "connector" + + component = helper.construct_component_config( + component_type, definition_name, inp + ) + + super().__init__(name, component) diff --git a/instill/resources/connector_airbyte.py b/instill/resources/connector_airbyte.py deleted file mode 100644 index e7e3d39..0000000 --- a/instill/resources/connector_airbyte.py +++ /dev/null @@ -1,1806 +0,0 @@ -# pylint: disable=no-member,wrong-import-position,no-name-in-module -import json - -import jsonschema - -from instill.clients import InstillClient -from instill.protogen.vdp.pipeline.v1beta.pipeline_pb2 import Component -from instill.resources import const -from instill.resources.connector import Connector -from instill.resources.schema import ( - airbyte, - airbyte_task_write_destination_input, - helper, -) - - -class AirbyteAmazonsqsConnector(Connector): - """Airbyte Amazonsqs Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Amazonsqs, - ) -> None: - definition = "connector-definitions/airbyte-destination" 
- - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteAmazonsqsConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteAwsdatalakeConnector(Connector): - """Airbyte Awsdatalake Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Awsdatalake, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteAwsdatalakeConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteAzureblobstorageConnector(Connector): - """Airbyte Azureblobstorage Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Azureblobstorage, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config, AirbyteAzureblobstorageConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteBigqueryConnector(Connector): - """Airbyte Bigquery Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Bigquery, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteBigqueryConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteCassandraConnector(Connector): - """Airbyte Cassandra Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Cassandra, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteCassandraConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: 
airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteChromaConnector(Connector): - """Airbyte Chroma Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Chroma, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteChromaConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteClickhouseConnector(Connector): - """Airbyte Clickhouse Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Clickhouse, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteClickhouseConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteConvexConnector(Connector): - """Airbyte Convex Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Convex, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteConvexConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteCsvConnector(Connector): - """Airbyte Csv Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Csv, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteCsvConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteCumulioConnector(Connector): - """Airbyte Cumulio Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - 
def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Cumulio, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteCumulioConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteDatabendConnector(Connector): - """Airbyte Databend Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Databend, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteDatabendConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteDatabricksConnector(Connector): - """Airbyte Databricks Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Databricks, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteDatabricksConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteDorisConnector(Connector): - """Airbyte Doris Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Doris, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteDorisConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteDuckdbConnector(Connector): - """Airbyte Duckdb Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Duckdb, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteDuckdbConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - 
def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteDynamodbConnector(Connector): - """Airbyte Dynamodb Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Dynamodb, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteDynamodbConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteE2etestConnector(Connector): - """Airbyte E2etest Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.E2etest, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteE2etestConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteElasticsearchConnector(Connector): - """Airbyte Elasticsearch Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Elasticsearch, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config, AirbyteElasticsearchConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteExasolConnector(Connector): - """Airbyte Exasol Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Exasol, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteExasolConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteFireboltConnector(Connector): - """Airbyte Firebolt Connector""" - - with 
open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Firebolt, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteFireboltConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteFirestoreConnector(Connector): - """Airbyte Firestore Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Firestore, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteFirestoreConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteGcsConnector(Connector): - """Airbyte Gcs Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Gcs, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteGcsConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteGooglesheetsConnector(Connector): - """Airbyte Googlesheets Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Googlesheets, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteGooglesheetsConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteIcebergConnector(Connector): - """Airbyte Iceberg Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Iceberg, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = 
helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteIcebergConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteKafkaConnector(Connector): - """Airbyte Kafka Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Kafka, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteKafkaConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteKeenConnector(Connector): - """Airbyte Keen Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Keen, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteKeenConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteKinesisConnector(Connector): - """Airbyte Kinesis Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Kinesis, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteKinesisConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteLangchainConnector(Connector): - """Airbyte Langchain Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Langchain, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteLangchainConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return 
super()._create_component(name, config) - - -class AirbyteLocaljsonConnector(Connector): - """Airbyte Localjson Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Localjson, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteLocaljsonConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteMariadbcolumnstoreConnector(Connector): - """Airbyte Mariadbcolumnstore Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Mariadbcolumnstore, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config, AirbyteMariadbcolumnstoreConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteMeilisearchConnector(Connector): - """Airbyte Meilisearch Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Meilisearch, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteMeilisearchConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteMilvusConnector(Connector): - """Airbyte Milvus Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Milvus, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteMilvusConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteMongodbConnector(Connector): - """Airbyte Mongodb Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: 
InstillClient, - name: str, - config: airbyte.Mongodb, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteMongodbConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteMqttConnector(Connector): - """Airbyte Mqtt Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Mqtt, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteMqttConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbytMssqlConnector(Connector): - """Airbyte Mssql Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Mssql, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbytMssqlConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteMysqlConnector(Connector): - """Airbyte Mysql Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Mysql, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteMysqlConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteOracleConnector(Connector): - """Airbyte Oracle Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Oracle, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteOracleConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: 
airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbytePineconeConnector(Connector): - """Airbyte Pinecone Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Pinecone, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbytePineconeConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbytePostgresConnector(Connector): - """Airbyte Postgres Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Postgres, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbytePostgresConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbytePubsubConnector(Connector): - """Airbyte Pubsub Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Pubsub, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbytePubsubConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbytePulsarConnector(Connector): - """Airbyte Pulsar Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Pulsar, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbytePulsarConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteQdrantConnector(Connector): - """Airbyte Qdrant Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = 
json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Qdrant, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteQdrantConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteR2Connector(Connector): - """Airbyte R2 Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.R2, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteR2Connector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteRabbitmqConnector(Connector): - """Airbyte Rabbitmq Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Rabbitmq, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteRabbitmqConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteRedisConnector(Connector): - """Airbyte Redis Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Redis, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteRedisConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteRedpandaConnector(Connector): - """Airbyte Redpanda Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Redpanda, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteRedpandaConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - 
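Every Airbyte destination class removed in this file follows the same shape, which is why the deletion above is so repetitive. A condensed sketch of that legacy pattern, assembled only from the removed code (the per-destination classes differed just in their name and config dataclass, and each carried an identical create_component helper built on the old one-argument construct_component_config):

import json

import jsonschema

from instill.resources import const
from instill.resources.connector import Connector
from instill.resources.schema import helper


class LegacyAirbyteDestinationConnector(Connector):
    # Hypothetical stand-in for the removed per-destination classes.
    with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f:
        definitions_jsonschema = json.loads(f.read())

    def __init__(self, client, name: str, config) -> None:
        # config is one of the airbyte.* dataclasses; every destination shared
        # the single airbyte-destination definition.
        definition = "connector-definitions/airbyte-destination"
        config_spec = helper.pop_default_and_to_dict(config)
        jsonschema.validate(config_spec, self.definitions_jsonschema)
        super().__init__(client, name, definition, config_spec)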
def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteRedshiftConnector(Connector): - """Airbyte Redshift Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Redshift, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteRedshiftConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteRocksetConnector(Connector): - """Airbyte Rockset Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Rockset, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteRocksetConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteS3glueConnector(Connector): - """Airbyte S3glue Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.S3glue, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteS3glueConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteS3Connector(Connector): - """Airbyte S3 Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.S3, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteS3Connector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteScyllaConnector(Connector): - """Airbyte Scylla Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - 
definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Scylla, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteScyllaConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteSelectdbConnector(Connector): - """Airbyte Selectdb Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Selectdb, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteSelectdbConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteSftpjsonConnector(Connector): - """Airbyte Sftpjson Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Sftpjson, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteSftpjsonConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteSnowflakeConnector(Connector): - """Airbyte Snowflake Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Snowflake, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteSnowflakeConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteSqliteConnector(Connector): - """Airbyte Sqlite Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Sqlite, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteSqliteConnector.definitions_jsonschema) - 
super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteStarburstgalaxyConnector(Connector): - """Airbyte Starburstgalaxy Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Starburstgalaxy, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config, AirbyteStarburstgalaxyConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteTeradataConnector(Connector): - """Airbyte Teradata Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Teradata, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteTeradataConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteTidbConnector(Connector): - """Airbyte Tidb Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Tidb, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteTidbConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteTimeplusConnector(Connector): - """Airbyte Timeplus Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Timeplus, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteTimeplusConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteTypesenseConnector(Connector): - """Airbyte 
Typesense Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Typesense, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteTypesenseConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteVerticaConnector(Connector): - """Airbyte Vertica Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Vertica, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteVerticaConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteWeaviateConnector(Connector): - """Airbyte Weaviate Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Weaviate, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteWeaviateConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteXataConnector(Connector): - """Airbyte Xata Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Xata, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, AirbyteXataConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteYugabytedbConnector(Connector): - """Airbyte Yugabytedb Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Yugabytedb, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = 
helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, AirbyteYugabytedbConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) - - -class AirbyteAirbytedevmatecloudConnector(Connector): - """Airbyte Airbytedevmatecloud Connector""" - - with open(f"{const.SPEC_PATH}/airbyte_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - - def __init__( - self, - client: InstillClient, - name: str, - config: airbyte.Airbytedevmatecloud, - ) -> None: - definition = "connector-definitions/airbyte-destination" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config, AirbyteAirbytedevmatecloudConnector.definitions_jsonschema - ) - super().__init__(client, name, definition, config_spec) - - def create_component( - self, - name: str, - inp: airbyte_task_write_destination_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) diff --git a/instill/resources/connector_blockchain.py b/instill/resources/connector_blockchain.py index 64f80d1..da15c5d 100644 --- a/instill/resources/connector_blockchain.py +++ b/instill/resources/connector_blockchain.py @@ -1,37 +1,21 @@ # pylint: disable=no-member,wrong-import-position,no-name-in-module -import json +from instill.resources import Component +from instill.resources.schema import helper, numbers_task_commit_input -import jsonschema -from instill.clients import InstillClient -from instill.protogen.vdp.pipeline.v1beta.pipeline_pb2 import Component -from instill.resources import const -from instill.resources.connector import Connector -from instill.resources.schema import helper, numbers, numbers_task_commit_input - - -class NumbersConnector(Connector): +class NumbersConnector(Component): """Numbers Connector""" - with open(f"{const.SPEC_PATH}/numbers_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - def __init__( self, - client: InstillClient, name: str, - config: numbers.NumbersProtocolBlockchainConnectorSpec, + inp: numbers_task_commit_input.Input, ) -> None: - definition = "connector-definitions/numbers" + definition_name = "connector-definitions/numbers" + component_type = "connector" - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, NumbersConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) + component = helper.construct_component_config( + component_type, definition_name, inp + ) - def create_component( - self, - name: str, - inp: numbers_task_commit_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) + super().__init__(name, component) diff --git a/instill/resources/connector_data.py b/instill/resources/connector_data.py index 950d281..4cc8c82 100644 --- a/instill/resources/connector_data.py +++ b/instill/resources/connector_data.py @@ -1,29 +1,17 @@ # pylint: disable=no-member,wrong-import-position,no-name-in-module -import json from typing import Union -import jsonschema - -from instill.clients import InstillClient -from instill.protogen.vdp.pipeline.v1beta.pipeline_pb2 import Component -from instill.resources import const -from 
instill.resources.connector import Connector +from instill.resources import Component from instill.resources.schema import ( - bigquery, bigquery_task_insert_input, - googlecloudstorage, googlecloudstorage_task_upload_input, - googlesearch, googlesearch_task_search_input, helper, - pinecone, pinecone_task_query_input, pinecone_task_upsert_input, - redis, redis_task_chat_history_retrieve_input, redis_task_chat_message_write_input, redis_task_chat_message_write_multi_modal_input, - restapi, restapi_task_delete_input, restapi_task_get_input, restapi_task_head_input, @@ -31,151 +19,89 @@ restapi_task_patch_input, restapi_task_post_input, restapi_task_put_input, - website, website_task_scrape_website_input, ) -class BigQueryConnector(Connector): +class BigQueryConnector(Component): """BigQuery Connector""" - with open( - f"{const.SPEC_PATH}/bigquery_definitions.json", "r", encoding="utf8" - ) as f: - definitions_jsonschema = json.loads(f.read()) - def __init__( self, - client: InstillClient, name: str, - config: bigquery.BigQueryConnectorSpec, + inp: bigquery_task_insert_input.Input, ) -> None: - definition = "connector-definitions/bigquery" + definition_name = "connector-definitions/bigquery" + component_type = "connector" - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, BigQueryConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) + component = helper.construct_component_config( + component_type, definition_name, inp + ) - def create_component( - self, - name: str, - inp: bigquery_task_insert_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) + super().__init__(name, component) -class PineconeConnector(Connector): +class PineconeConnector(Component): """Pinecone Connector""" - with open( - f"{const.SPEC_PATH}/pinecone_definitions.json", "r", encoding="utf8" - ) as f: - definitions_jsonschema = json.loads(f.read()) - def __init__( - self, - client: InstillClient, - name: str, - config: pinecone.PineconeConnectorSpec, - ) -> None: - definition = "connector-definitions/pinecone" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, PineconeConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( self, name: str, inp: Union[ pinecone_task_query_input.Input, pinecone_task_upsert_input.Input, ], - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) + ) -> None: + definition_name = "connector-definitions/pinecone" + component_type = "connector" + component = helper.construct_component_config( + component_type, definition_name, inp + ) -class GoogleCloudStorageConnector(Connector): - """GoogleCloudStorage Connector""" + super().__init__(name, component) - with open( - f"{const.SPEC_PATH}/googlecloudstorage_definitions.json", "r", encoding="utf8" - ) as f: - definitions_jsonschema = json.loads(f.read()) + +class GoogleCloudStorageConnector(Component): + """GoogleCloudStorage Connector""" def __init__( self, - client: InstillClient, name: str, - config: googlecloudstorage.GoogleCloudStorageConnectorSpec, + inp: googlecloudstorage_task_upload_input.Input, ) -> None: - definition = "connector-definitions/gcs" + definition_name = "connector-definitions/gcs" + component_type = "connector" - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate( - config_spec, 
GoogleCloudStorageConnector.definitions_jsonschema + component = helper.construct_component_config( + component_type, definition_name, inp ) - super().__init__(client, name, definition, config_spec) - def create_component( - self, - name: str, - inp: googlecloudstorage_task_upload_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) + super().__init__(name, component) -class GoogleSearchConnector(Connector): +class GoogleSearchConnector(Component): """GoogleSearch Connector""" - with open( - f"{const.SPEC_PATH}/googlecloudstorage_definitions.json", "r", encoding="utf8" - ) as f: - definitions_jsonschema = json.loads(f.read()) - def __init__( self, - client: InstillClient, name: str, - config: googlesearch.GoogleSearchConnectorSpec, + inp: googlesearch_task_search_input.Input, ) -> None: - definition = "connector-definitions/google-search" + definition_name = "connector-definitions/google-search" + component_type = "connector" - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, GoogleSearchConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) + component = helper.construct_component_config( + component_type, definition_name, inp + ) - def create_component( - self, - name: str, - inp: googlesearch_task_search_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) + super().__init__(name, component) -class RedisConnector(Connector): +class RedisConnector(Component): """Redis Connector""" - with open(f"{const.SPEC_PATH}/redis_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - def __init__( - self, - client: InstillClient, - name: str, - config: redis.RedisConnectorResource, - ) -> None: - definition = "connector-definitions/redis" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, RedisConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) - - def create_component( self, name: str, inp: Union[ @@ -183,30 +109,21 @@ def create_component( redis_task_chat_message_write_input.Input, redis_task_chat_message_write_multi_modal_input.Input, ], - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) + ) -> None: + definition_name = "connector-definitions/redis" + component_type = "connector" + component = helper.construct_component_config( + component_type, definition_name, inp + ) -class RestAPIConnector(Connector): - """RestAPI Connector""" + super().__init__(name, component) - with open(f"{const.SPEC_PATH}/restapi_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) - def __init__( - self, - client: InstillClient, - name: str, - config: restapi.RESTAPIConnectorSpec, - ) -> None: - definition = "connector-definitions/restapi" - - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, RestAPIConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) +class RestAPIConnector(Component): + """RestAPI Connector""" - def create_component( + def __init__( self, name: str, inp: Union[ @@ -219,33 +136,30 @@ def create_component( restapi_task_patch_input.Input, restapi_task_put_input.Input, ], - ) -> Component: - config = helper.construct_component_config(inp) - return 
super()._create_component(name, config) + ) -> None: + definition_name = "connector-definitions/restapi" + component_type = "connector" + component = helper.construct_component_config( + component_type, definition_name, inp + ) -class WebsiteConnector(Connector): - """Website Connector""" + super().__init__(name, component) - with open(f"{const.SPEC_PATH}/website_definitions.json", "r", encoding="utf8") as f: - definitions_jsonschema = json.loads(f.read()) + +class WebsiteConnector(Component): + """Website Connector""" def __init__( self, - client: InstillClient, name: str, - config: website.WebsiteConnectorResource, + inp: website_task_scrape_website_input.Input, ) -> None: - definition = "connector-definitions/website" + definition_name = "connector-definitions/website" + component_type = "connector" - config_spec = helper.pop_default_and_to_dict(config) - jsonschema.validate(config_spec, WebsiteConnector.definitions_jsonschema) - super().__init__(client, name, definition, config_spec) + component = helper.construct_component_config( + component_type, definition_name, inp + ) - def create_component( - self, - name: str, - inp: website_task_scrape_website_input.Input, - ) -> Component: - config = helper.construct_component_config(inp) - return super()._create_component(name, config) + super().__init__(name, component) diff --git a/instill/resources/errors.py b/instill/resources/errors.py index 35c564e..823eef1 100644 --- a/instill/resources/errors.py +++ b/instill/resources/errors.py @@ -1,3 +1,8 @@ class WrongModeException(Exception): def __str__(self) -> str: return "Instill Model Connector mode error" + + +class ComponentTypeExection(Exception): + def __str__(self) -> str: + return "Component type not supported" diff --git a/instill/resources/operator.py b/instill/resources/operator.py index fdbc285..1d0a906 100644 --- a/instill/resources/operator.py +++ b/instill/resources/operator.py @@ -1,43 +1,107 @@ # pylint: disable=no-member,wrong-import-position -import instill.protogen.vdp.pipeline.v1beta.pipeline_pb2 as pipeline_pb +from typing import Union + +from instill.resources import Component from instill.resources.schema import ( - end_task_end_input, - end_task_end_metadata, + base64_task_decode_input, + base64_task_encode_input, helper, - start_task_start_metadata, + image_task_draw_classification_input, + image_task_draw_detection_input, + image_task_draw_instance_segmentation_input, + image_task_draw_keypoint_input, + image_task_draw_ocr_input, + image_task_draw_semantic_segmentation_input, + json_task_marshal_input, + json_task_unmarshal_input, + text_task_convert_to_text_input, + text_task_split_by_token_input, ) -def create_start_operator( - metadata_fields: start_task_start_metadata.Metadata, -) -> pipeline_pb.Component: - start_operator_component = pipeline_pb.Component() - start_operator_component.id = "start" - start_operator_component.resource_name = "" - start_operator_component.definition_name = "operator-definitions/start" +class Base64Operator(Component): + """Base64 Operator""" + + def __init__( + self, + name: str, + inp: Union[ + base64_task_encode_input.Input, + base64_task_decode_input.Input, + ], + ) -> None: + definition_name = "operator-definitions/base64" + component_type = "operator" + + component = helper.construct_component_config( + component_type, definition_name, inp + ) + + super().__init__(name, component) + + +class JSONOperator(Component): + """JSON Operator""" + + def __init__( + self, + name: str, + inp: Union[ + json_task_marshal_input.Input, + 
json_task_unmarshal_input.Input, + ], + ) -> None: + definition_name = "operator-definitions/json" + component_type = "operator" + + component = helper.construct_component_config( + component_type, definition_name, inp + ) + + super().__init__(name, component) + + +class ImageOperator(Component): + """Image Operator""" + + def __init__( + self, + name: str, + inp: Union[ + image_task_draw_classification_input.Input, + image_task_draw_detection_input.Input, + image_task_draw_instance_segmentation_input.Input, + image_task_draw_semantic_segmentation_input.Input, + image_task_draw_keypoint_input.Input, + image_task_draw_ocr_input.Input, + ], + ) -> None: + definition_name = "operator-definitions/image" + component_type = "operator" + + component = helper.construct_component_config( + component_type, definition_name, inp + ) - for metadata_key, metadata_val in metadata_fields.items(): - metadata_fields[metadata_key] = helper.pop_default_and_to_dict(metadata_val) # type: ignore - metadata = {"metadata": metadata_fields} - start_operator_component.configuration.update(metadata) # type: ignore + super().__init__(name, component) - return start_operator_component +class TextOperator(Component): + """Text Operator""" -def create_end_operator( - inp_fields: end_task_end_input.Input, - metadata_fields: end_task_end_metadata.Metadata, -) -> pipeline_pb.Component: - end_operator_component = pipeline_pb.Component() - end_operator_component.id = "end" - end_operator_component.resource_name = "" - end_operator_component.definition_name = "operator-definitions/end" + def __init__( + self, + name: str, + inp: Union[ + text_task_convert_to_text_input.Input, + text_task_split_by_token_input.Input, + ], + ) -> None: + definition_name = "operator-definitions/text" + component_type = "operator" - for metadata_key, metadata_val in metadata_fields.items(): - metadata_fields[metadata_key] = helper.pop_default_and_to_dict(metadata_val) # type: ignore - inp = {"input": inp_fields} - metadata = {"metadata": metadata_fields} - end_operator_component.configuration.update(inp) - end_operator_component.configuration.update(metadata) # type: ignore + component = helper.construct_component_config( + component_type, definition_name, inp + ) - return end_operator_component + super().__init__(name, component) diff --git a/instill/resources/schema/airbyte/OAuth2.py b/instill/resources/schema/airbyte/OAuth2.py index 9f76f83..fada4b8 100644 --- a/instill/resources/schema/airbyte/OAuth2.py +++ b/instill/resources/schema/airbyte/OAuth2.py @@ -16,6 +16,6 @@ class AuthType(Enum): class Field0: access_token: str refresh_token: str - auth_type: Optional[AuthType] = AuthType.OAuth2_0 + auth_type: AuthType = OAuth2.AuthType.OAuth2_0 client_id: Optional[str] = None client_secret: Optional[str] = None diff --git a/instill/resources/schema/airbyte/__init__.py b/instill/resources/schema/airbyte/__init__.py index f260e13..b054a8a 100644 --- a/instill/resources/schema/airbyte/__init__.py +++ b/instill/resources/schema/airbyte/__init__.py @@ -40,10 +40,10 @@ class AWSRegion(Enum): @dataclass class Amazonsqs: - destination: str queue_url: str region: AWSRegion access_key: Optional[str] = None + destination: str = 'airbyte-destination-amazon-sqs' message_body_key: Optional[str] = None message_delay: Optional[int] = None message_group_id: Optional[str] = None @@ -56,8 +56,8 @@ class CredentialsTitle(Enum): @dataclass class IAMRole: - credentials_title: CredentialsTitle role_arn: str + credentials_title: CredentialsTitle = CredentialsTitle.IAM_Role 
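The connector_blockchain.py, connector_data.py and operator.py hunks above converge on one new shape: each connector or operator is now itself a Component whose configuration is built up front from a typed task-input dataclass, instead of a server-side Connector resource plus a separate create_component() call. A minimal usage sketch under that assumption; the Input dataclasses come from the generated modules in instill/resources/schema and their fields are not shown in this diff, so they are taken here as already populated:

from instill.resources.connector_data import RestAPIConnector, WebsiteConnector
from instill.resources.operator import Base64Operator


def build_components(get_inp, scrape_inp, encode_inp):
    # get_inp:    restapi_task_get_input.Input
    # scrape_inp: website_task_scrape_website_input.Input
    # encode_inp: base64_task_encode_input.Input
    # Each constructor calls helper.construct_component_config(component_type,
    # definition_name, inp) and hands the result to Component.__init__, so the
    # returned objects are ready-made pipeline components.
    fetch = RestAPIConnector("fetch", get_inp)       # connector-definitions/restapi
    scrape = WebsiteConnector("scrape", scrape_inp)  # connector-definitions/website
    encode = Base64Operator("encode", encode_inp)    # operator-definitions/base64
    return [fetch, scrape, encode]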
class CredentialsTitle1(Enum): @@ -68,7 +68,7 @@ class CredentialsTitle1(Enum): class IAMUser: aws_access_key_id: str aws_secret_access_key: str - credentials_title: CredentialsTitle1 + credentials_title: CredentialsTitle1 = CredentialsTitle1.IAM_User class CompressionCodecOptional(Enum): @@ -83,9 +83,9 @@ class FormatType(Enum): @dataclass class JSONLinesNewlineDelimitedJSON: format_type: FormatType - compression_codec: Optional[ - CompressionCodecOptional - ] = CompressionCodecOptional.UNCOMPRESSED + compression_codec: Optional[CompressionCodecOptional] = ( + CompressionCodecOptional.UNCOMPRESSED + ) class CompressionCodecOptional1(Enum): @@ -102,9 +102,9 @@ class FormatType1(Enum): @dataclass class ParquetColumnarStorage: format_type: FormatType1 - compression_codec: Optional[ - CompressionCodecOptional1 - ] = CompressionCodecOptional1.SNAPPY + compression_codec: Optional[CompressionCodecOptional1] = ( + CompressionCodecOptional1.SNAPPY + ) class ChooseHowToPartitionData(Enum): @@ -150,21 +150,21 @@ class S3BucketRegion(Enum): class Awsdatalake: bucket_name: str credentials: Union[IAMRole, IAMUser] - destination: str lakeformation_database_name: str region: S3BucketRegion aws_account_id: Optional[str] = None bucket_prefix: Optional[str] = None - format: Optional[ - Union[JSONLinesNewlineDelimitedJSON, ParquetColumnarStorage] - ] = None + destination: str = 'airbyte-destination-aws-datalake' + format: Optional[Union[JSONLinesNewlineDelimitedJSON, ParquetColumnarStorage]] = ( + None + ) glue_catalog_float_as_decimal: Optional[bool] = False lakeformation_database_default_tag_key: Optional[str] = None lakeformation_database_default_tag_values: Optional[str] = None lakeformation_governed_tables: Optional[bool] = False - partitioning: Optional[ - ChooseHowToPartitionData - ] = ChooseHowToPartitionData.NO_PARTITIONING + partitioning: Optional[ChooseHowToPartitionData] = ( + ChooseHowToPartitionData.NO_PARTITIONING + ) class NormalizationFlattening(Enum): @@ -175,24 +175,24 @@ class NormalizationFlattening(Enum): @dataclass class CSVCommaSeparatedValues: flattening: NormalizationFlattening - format_type: str + format_type: str = 'CSV' @dataclass class JSONLinesNewlineDelimitedJSON1: - format_type: str + format_type: str = 'JSONL' @dataclass class Azureblobstorage: azure_blob_storage_account_key: str azure_blob_storage_account_name: str - destination: str format: Union[CSVCommaSeparatedValues, JSONLinesNewlineDelimitedJSON1] azure_blob_storage_container_name: Optional[str] = None azure_blob_storage_endpoint_domain_name: Optional[str] = 'blob.core.windows.net' azure_blob_storage_output_buffer_size: Optional[int] = 5 azure_blob_storage_spill_size: Optional[int] = 500 + destination: str = 'airbyte-destination-azure-blob-storage' class DatasetLocation(Enum): @@ -244,9 +244,9 @@ class DatasetLocation(Enum): @dataclass class HMACKey: - credential_type: str hmac_key_access_id: str hmac_key_secret: str + credential_type: str = 'HMAC_KEY' class GCSTmpFilesAfterwardProcessing(Enum): @@ -259,15 +259,15 @@ class GCSStaging: credential: HMACKey gcs_bucket_name: str gcs_bucket_path: str - method: str - keep_files_in_gcs_bucket: Optional[ - GCSTmpFilesAfterwardProcessing - ] = GCSTmpFilesAfterwardProcessing.Delete_all_tmp_files_from_GCS + keep_files_in_gcs_bucket: Optional[GCSTmpFilesAfterwardProcessing] = ( + GCSTmpFilesAfterwardProcessing.Delete_all_tmp_files_from_GCS + ) + method: str = 'GCS Staging' @dataclass class StandardInserts: - method: str + method: str = 'Standard' class 
TransformationQueryRunType(Enum): @@ -279,27 +279,27 @@ class TransformationQueryRunType(Enum): class Bigquery: dataset_id: str dataset_location: DatasetLocation - destination: str project_id: str big_query_client_buffer_size_mb: Optional[int] = 15 credentials_json: Optional[str] = None + destination: str = 'airbyte-destination-bigquery' disable_type_dedupe: Optional[bool] = False loading_method: Optional[Union[GCSStaging, StandardInserts]] = None raw_data_dataset: Optional[str] = None - transformation_priority: Optional[ - TransformationQueryRunType - ] = TransformationQueryRunType.interactive + transformation_priority: Optional[TransformationQueryRunType] = ( + TransformationQueryRunType.interactive + ) @dataclass class Cassandra: address: str - destination: str keyspace: str password: str port: int username: str datacenter: Optional[str] = 'datacenter1' + destination: str = 'airbyte-destination-cassandra' replication: Optional[int] = 1 @@ -311,8 +311,8 @@ class Mode(Enum): class AzureOpenAI: api_base: str deployment: str - mode: Mode openai_key: str + mode: Mode = Mode.azure_openai class Mode1(Enum): @@ -321,8 +321,8 @@ class Mode1(Enum): @dataclass class OpenAI: - mode: Mode1 openai_key: str + mode: Mode1 = Mode1.openai class Mode2(Enum): @@ -332,7 +332,7 @@ class Mode2(Enum): @dataclass class Cohere: cohere_key: str - mode: Mode2 + mode: Mode2 = Mode2.cohere class Mode3(Enum): @@ -343,7 +343,7 @@ class Mode3(Enum): class FromField: dimensions: int field_name: str - mode: Mode3 + mode: Mode3 = Mode3.from_field class Mode4(Enum): @@ -352,7 +352,7 @@ class Mode4(Enum): @dataclass class Fake: - mode: Mode4 + mode: Mode4 = Mode4.fake class Mode5(Enum): @@ -363,8 +363,8 @@ class Mode5(Enum): class OpenAICompatible: base_url: str dimensions: int - mode: Mode5 api_key: Optional[str] = '' + mode: Mode5 = Mode5.openai_compatible model_name: Optional[str] = 'text-embedding-ada-002' @@ -374,7 +374,7 @@ class Mode6(Enum): @dataclass class ChromaDefaultEmbeddingFunction: - mode: Optional[Mode6] = Mode6.no_embedding + mode: Mode6 = Mode6.no_embedding class Mode7(Enum): @@ -384,7 +384,7 @@ class Mode7(Enum): @dataclass class PersistentClientMode: path: str - mode: Optional[Mode7] = Mode7.persistent_client + mode: Mode7 = Mode7.persistent_client class Mode8(Enum): @@ -396,7 +396,7 @@ class ClientServerMode: host: str port: int ssl: bool - mode: Optional[Mode8] = Mode8.http_client + mode: Mode8 = Mode8.http_client password: Optional[str] = '' username: Optional[str] = '' @@ -419,8 +419,8 @@ class Mode9(Enum): @dataclass class BySeparator: - mode: Mode9 keep_separator: Optional[bool] = False + mode: Mode9 = Mode9.separator separators: Optional[List[str]] = field( default_factory=lambda: ['"\\n\\n"', '"\\n"', '" "', '""'] ) @@ -432,7 +432,7 @@ class Mode10(Enum): @dataclass class ByMarkdownHeader: - mode: Mode10 + mode: Mode10 = Mode10.markdown split_level: Optional[int] = 1 @@ -462,7 +462,7 @@ class Mode11(Enum): @dataclass class ByProgrammingLanguage: language: Language - mode: Mode11 + mode: Mode11 = Mode11.code @dataclass @@ -481,7 +481,6 @@ class ProcessingConfigModel: @dataclass class Chroma: - destination: str embedding: Union[ AzureOpenAI, OpenAI, @@ -493,39 +492,40 @@ class Chroma: ] indexing: Indexing processing: ProcessingConfigModel + destination: str = 'airbyte-destination-chroma' omit_raw_text: Optional[bool] = False @dataclass class NoTunnel: - tunnel_method: str + tunnel_method: str = 'NO_TUNNEL' @dataclass class SSHKeyAuthentication: ssh_key: str tunnel_host: str - tunnel_method: str 
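A recurring change in this schema module is that constant discriminator fields (destination, credentials_title, mode, method, and so on) gain literal defaults and move behind the required fields, so callers only spell out the values that actually vary. A small sketch of the effect, using two of the dataclasses updated above with placeholder values:

from instill.resources.schema import airbyte

# Only role_arn is required now; credentials_title defaults to IAM_Role.
role = airbyte.IAMRole(role_arn="arn:aws:iam::123456789012:role/airbyte-demo")
assert role.credentials_title == airbyte.CredentialsTitle.IAM_Role

# destination defaults to 'airbyte-destination-cassandra'; the remaining
# values here are placeholders, not real credentials.
cassandra = airbyte.Cassandra(
    address="127.0.0.1",
    keyspace="demo",
    password="change-me",
    port=9042,
    username="cassandra",
)
assert cassandra.destination == "airbyte-destination-cassandra"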
tunnel_port: int tunnel_user: str + tunnel_method: str = 'SSH_KEY_AUTH' @dataclass class PasswordAuthentication: tunnel_host: str - tunnel_method: str tunnel_port: int tunnel_user: str tunnel_user_password: str + tunnel_method: str = 'SSH_PASSWORD_AUTH' @dataclass class Clickhouse: database: str - destination: str host: str port: int username: str + destination: str = 'airbyte-destination-clickhouse' jdbc_url_params: Optional[str] = None password: Optional[str] = None ssl: Optional[bool] = False @@ -538,39 +538,39 @@ class Clickhouse: class Convex: access_key: str deployment_url: str - destination: str + destination: str = 'airbyte-destination-convex' @dataclass class Comma: - delimiter: str + delimiter: str = '\\u002c' @dataclass class Semicolon: - delimiter: str + delimiter: str = '\\u003b' @dataclass class Pipe: - delimiter: str + delimiter: str = '\\u007c' @dataclass class Tab: - delimiter: str + delimiter: str = '\\u0009' @dataclass class Space: - delimiter: str + delimiter: str = '\\u0020' @dataclass class Csv: - destination: str destination_path: str delimiter_type: Optional[Union[Comma, Semicolon, Pipe, Tab, Space]] = None + destination: str = 'airbyte-destination-csv' @dataclass @@ -578,15 +578,15 @@ class Cumulio: api_host: str api_key: str api_token: str - destination: str + destination: str = 'airbyte-destination-cumulio' @dataclass class Databend: database: str - destination: str host: str username: str + destination: str = 'airbyte-destination-databend' password: Optional[str] = None port: Optional[int] = 443 table: Optional[str] = 'default' @@ -594,17 +594,17 @@ class Databend: @dataclass class FieldRecommendedManagedTables: - data_source_type: str + data_source_type: str = 'MANAGED_TABLES_STORAGE' @dataclass class AmazonS3: - data_source_type: str s3_access_key_id: str s3_bucket_name: str s3_bucket_path: str s3_bucket_region: S3BucketRegion s3_secret_access_key: str + data_source_type: str = 'S3_STORAGE' file_name_pattern: Optional[str] = None @@ -613,8 +613,8 @@ class AzureBlobStorage: azure_blob_storage_account_name: str azure_blob_storage_container_name: str azure_blob_storage_sas_token: str - data_source_type: str azure_blob_storage_endpoint_domain_name: Optional[str] = 'blob.core.windows.net' + data_source_type: str = 'AZURE_BLOB_STORAGE' @dataclass @@ -624,9 +624,9 @@ class Databricks: databricks_http_path: str databricks_personal_access_token: str databricks_server_hostname: str - destination: str database: Optional[str] = None databricks_port: Optional[str] = '443' + destination: str = 'airbyte-destination-databricks' enable_schema_evolution: Optional[bool] = False purge_staging_data: Optional[bool] = True schema_: Optional[str] = 'default' @@ -635,18 +635,18 @@ class Databricks: @dataclass class Doris: database: str - destination: str host: str httpport: int queryport: int username: str + destination: str = 'airbyte-destination-doris' password: Optional[str] = None @dataclass class Duckdb: - destination: str destination_path: str + destination: str = 'airbyte-destination-duckdb' motherduck_api_key: Optional[str] = None schema_: Optional[str] = None @@ -683,10 +683,10 @@ class DynamoDBRegion(Enum): @dataclass class Dynamodb: access_key_id: str - destination: str dynamodb_region: DynamoDBRegion dynamodb_table_name_prefix: str secret_access_key: str + destination: str = 'airbyte-destination-dynamodb' dynamodb_endpoint: Optional[str] = '' @@ -726,57 +726,57 @@ class RandomSampling: @dataclass class Logging: logging_config: Union[FirstNEntries, EveryNThEntry, 
RandomSampling] - test_destination_type: str + test_destination_type: str = 'LOGGING' @dataclass class Silent: - test_destination_type: str + test_destination_type: str = 'SILENT' @dataclass class Throttled: millis_per_record: int - test_destination_type: str + test_destination_type: str = 'THROTTLED' @dataclass class Failing: num_messages: int - test_destination_type: str + test_destination_type: str = 'FAILING' @dataclass class E2etest: - destination: str test_destination: Union[Logging, Silent, Throttled, Failing] + destination: str = 'airbyte-destination-e2e-test' @dataclass class None1: - method: str + method: str = 'none' @dataclass class ApiKeySecret: apiKeyId: str apiKeySecret: str - method: str + method: str = 'secret' @dataclass class UsernamePassword: - method: str password: str username: str + method: str = 'basic' @dataclass class Elasticsearch: - destination: str endpoint: str authenticationMethod: Optional[Union[None1, ApiKeySecret, UsernamePassword]] = None ca_certificate: Optional[str] = None + destination: str = 'airbyte-destination-elasticsearch' tunnel_method: Optional[ Union[NoTunnel, SSHKeyAuthentication, PasswordAuthentication] ] = None @@ -785,37 +785,37 @@ class Elasticsearch: @dataclass class Exasol: - destination: str host: str port: int schema_: str username: str certificateFingerprint: Optional[str] = None + destination: str = 'airbyte-destination-exasol' jdbc_url_params: Optional[str] = None password: Optional[str] = None @dataclass class SQLInserts: - method: str + method: str = 'SQL' @dataclass class ExternalTableViaS3: aws_key_id: str aws_key_secret: str - method: str s3_bucket: str s3_region: str + method: str = 'S3' @dataclass class Firebolt: database: str - destination: str password: str username: str account: Optional[str] = None + destination: str = 'airbyte-destination-firebolt' engine: Optional[str] = None host: Optional[str] = None loading_method: Optional[Union[SQLInserts, ExternalTableViaS3]] = None @@ -823,9 +823,9 @@ class Firebolt: @dataclass class Firestore: - destination: str project_id: str credentials_json: Optional[str] = None + destination: str = 'airbyte-destination-firestore' class CredentialType(Enum): @@ -1035,7 +1035,6 @@ class GCSBucketRegion(Enum): @dataclass class Gcs: credential: HMACKey1 - destination: str format: Union[ AvroApacheAvro, CSVCommaSeparatedValues1, @@ -1044,6 +1043,7 @@ class Gcs: ] gcs_bucket_name: str gcs_bucket_path: str + destination: str = 'airbyte-destination-gcs' gcs_bucket_region: Optional[GCSBucketRegion] = GCSBucketRegion.us @@ -1057,8 +1057,8 @@ class AuthenticationViaGoogleOAuth: @dataclass class Googlesheets: credentials: AuthenticationViaGoogleOAuth - destination: str spreadsheet_id: str + destination: str = 'airbyte-destination-google-sheets' class CatalogType(Enum): @@ -1155,9 +1155,9 @@ class Iceberg: JdbcCatalogUseRelationalDatabase, RESTCatalog, ] - destination: str format_config: FileFormat storage_config: Union[S3, ServerManaged] + destination: str = 'airbyte-destination-iceberg' class ACKs(Enum): @@ -1235,7 +1235,6 @@ class Kafka: client_dns_lookup: ClientDNSLookup compression_type: CompressionType4 delivery_timeout_ms: int - destination: str enable_idempotence: bool linger_ms: str max_block_ms: str @@ -1250,6 +1249,7 @@ class Kafka: socket_connection_setup_timeout_ms: str topic_pattern: str client_id: Optional[str] = None + destination: str = 'airbyte-destination-kafka' sync_producer: Optional[bool] = False test_topic: Optional[str] = None @@ -1257,8 +1257,8 @@ class Kafka: @dataclass class 
Keen: api_key: str - destination: str project_id: str + destination: str = 'airbyte-destination-keen' infer_timestamp: Optional[bool] = True @@ -1266,11 +1266,11 @@ class Keen: class Kinesis: accessKey: str bufferSize: int - destination: str endpoint: str privateKey: str region: str shardCount: int + destination: str = 'airbyte-destination-kinesis' class Mode12(Enum): @@ -1280,7 +1280,7 @@ class Mode12(Enum): @dataclass class OpenAI1: openai_key: str - mode: Optional[Mode12] = Mode12.openai + mode: Mode12 = Mode12.openai class Mode13(Enum): @@ -1289,7 +1289,7 @@ class Mode13(Enum): @dataclass class Fake1: - mode: Optional[Mode13] = Mode13.fake + mode: Mode13 = Mode13.fake class Mode14(Enum): @@ -1301,7 +1301,7 @@ class Pinecone: index: str pinecone_environment: str pinecone_key: str - mode: Optional[Mode14] = Mode14.pinecone + mode: Mode14 = Mode14.pinecone class Mode15(Enum): @@ -1311,7 +1311,7 @@ class Mode15(Enum): @dataclass class DocArrayHnswSearch: destination_path: str - mode: Optional[Mode15] = Mode15.DocArrayHnswSearch + mode: Mode15 = Mode15.DocArrayHnswSearch class Mode16(Enum): @@ -1322,7 +1322,7 @@ class Mode16(Enum): class ChromaLocalPersistance: destination_path: str collection_name: Optional[str] = 'langchain' - mode: Optional[Mode16] = Mode16.chroma_local + mode: Mode16 = Mode16.chroma_local @dataclass @@ -1334,25 +1334,25 @@ class ProcessingConfigModel1: @dataclass class Langchain: - destination: str embedding: Union[OpenAI1, Fake1] indexing: Union[Pinecone, DocArrayHnswSearch, ChromaLocalPersistance] processing: ProcessingConfigModel1 + destination: str = 'airbyte-destination-langchain' @dataclass class Localjson: - destination: str destination_path: str + destination: str = 'airbyte-destination-local-json' @dataclass class Mariadbcolumnstore: database: str - destination: str host: str port: int username: str + destination: str = 'airbyte-destination-mariadb-columnstore' jdbc_url_params: Optional[str] = None password: Optional[str] = None tunnel_method: Optional[ @@ -1362,9 +1362,9 @@ class Mariadbcolumnstore: @dataclass class Meilisearch: - destination: str host: str api_key: Optional[str] = None + destination: str = 'airbyte-destination-meilisearch' class Mode17(Enum): @@ -1373,8 +1373,8 @@ class Mode17(Enum): @dataclass class OpenAI2: - mode: Mode17 openai_key: str + mode: Mode17 = Mode17.openai class Mode18(Enum): @@ -1384,7 +1384,7 @@ class Mode18(Enum): @dataclass class Cohere1: cohere_key: str - mode: Mode18 + mode: Mode18 = Mode18.cohere class Mode19(Enum): @@ -1393,7 +1393,7 @@ class Mode19(Enum): @dataclass class Fake2: - mode: Mode19 + mode: Mode19 = Mode19.fake class Mode20(Enum): @@ -1404,8 +1404,8 @@ class Mode20(Enum): class AzureOpenAI1: api_base: str deployment: str - mode: Mode20 openai_key: str + mode: Mode20 = Mode20.azure_openai class Mode21(Enum): @@ -1416,8 +1416,8 @@ class Mode21(Enum): class OpenAICompatible1: base_url: str dimensions: int - mode: Mode21 api_key: Optional[str] = '' + mode: Mode21 = Mode21.openai_compatible model_name: Optional[str] = 'text-embedding-ada-002' @@ -1427,8 +1427,8 @@ class Mode22(Enum): @dataclass class APIToken: - mode: Mode22 token: str + mode: Mode22 = Mode22.token class Mode23(Enum): @@ -1437,9 +1437,9 @@ class Mode23(Enum): @dataclass class UsernamePassword1: - mode: Mode23 password: str username: str + mode: Mode23 = Mode23.username_password class Mode24(Enum): @@ -1448,7 +1448,7 @@ class Mode24(Enum): @dataclass class NoAuth: - mode: Mode24 + mode: Mode24 = Mode24.no_auth @dataclass @@ -1467,8 +1467,8 @@ class 
Mode25(Enum): @dataclass class BySeparator1: - mode: Mode25 keep_separator: Optional[bool] = False + mode: Mode25 = Mode25.separator separators: Optional[List[str]] = field( default_factory=lambda: ['"\\n\\n"', '"\\n"', '" "', '""'] ) @@ -1480,7 +1480,7 @@ class Mode26(Enum): @dataclass class ByMarkdownHeader1: - mode: Mode26 + mode: Mode26 = Mode26.markdown split_level: Optional[int] = 1 @@ -1491,7 +1491,7 @@ class Mode27(Enum): @dataclass class ByProgrammingLanguage1: language: Language - mode: Mode27 + mode: Mode27 = Mode27.code @dataclass @@ -1510,23 +1510,23 @@ class ProcessingConfigModel2: @dataclass class Milvus: - destination: str embedding: Union[OpenAI2, Cohere1, Fake2, AzureOpenAI1, OpenAICompatible1] indexing: Indexing1 processing: ProcessingConfigModel2 + destination: str = 'airbyte-destination-milvus' omit_raw_text: Optional[bool] = False @dataclass class None_1: - authorization: str + authorization: str = 'none' @dataclass class LoginPassword: - authorization: str password: str username: str + authorization: str = 'login/password' class Instance(Enum): @@ -1566,7 +1566,7 @@ class MongoDBAtlas: class Mongodb: auth_type: Union[None_1, LoginPassword] database: str - destination: str + destination: str = 'airbyte-destination-mongodb' instance_type: Optional[ Union[StandaloneMongoDbInstance, ReplicaSet, MongoDBAtlas] ] = None @@ -1588,13 +1588,13 @@ class Mqtt: broker_port: int clean_session: bool connect_timeout: int - destination: str message_qos: MessageQoS message_retained: bool publisher_sync: bool topic_pattern: str use_tls: bool client: Optional[str] = None + destination: str = 'airbyte-destination-mqtt' password: Optional[str] = None topic_test: Optional[str] = None username: Optional[str] = None @@ -1606,7 +1606,7 @@ class SslMethod(Enum): @dataclass class Unencrypted: - ssl_method: SslMethod + ssl_method: SslMethod = SslMethod.unencrypted class SslMethod1(Enum): @@ -1615,7 +1615,7 @@ class SslMethod1(Enum): @dataclass class EncryptedTrustServerCertificate: - ssl_method: SslMethod1 + ssl_method: SslMethod1 = SslMethod1.encrypted_trust_server_certificate class SslMethod2(Enum): @@ -1624,18 +1624,18 @@ class SslMethod2(Enum): @dataclass class EncryptedVerifyCertificate: - ssl_method: SslMethod2 hostNameInCertificate: Optional[str] = None + ssl_method: SslMethod2 = SslMethod2.encrypted_verify_certificate @dataclass class Mssql: database: str - destination: str host: str port: int schema_: str username: str + destination: str = 'airbyte-destination-mssql' jdbc_url_params: Optional[str] = None password: Optional[str] = None ssl_method: Optional[ @@ -1649,10 +1649,10 @@ class Mssql: @dataclass class Mysql: database: str - destination: str host: str port: int username: str + destination: str = 'airbyte-destination-mysql' jdbc_url_params: Optional[str] = None password: Optional[str] = None ssl: Optional[bool] = True @@ -1667,7 +1667,7 @@ class EncryptionMethod(Enum): @dataclass class Unencrypted1: - encryption_method: EncryptionMethod + encryption_method: EncryptionMethod = EncryptionMethod.unencrypted class EncryptionAlgorithm(Enum): @@ -1682,8 +1682,8 @@ class EncryptionMethod1(Enum): @dataclass class NativeNetworkEncryptionNNE: - encryption_method: EncryptionMethod1 encryption_algorithm: Optional[EncryptionAlgorithm] = EncryptionAlgorithm.AES256 + encryption_method: EncryptionMethod1 = EncryptionMethod1.client_nne class EncryptionMethod2(Enum): @@ -1692,17 +1692,19 @@ class EncryptionMethod2(Enum): @dataclass class TLSEncryptedVerifyCertificate: - encryption_method: 
EncryptionMethod2 ssl_certificate: str + encryption_method: EncryptionMethod2 = ( + EncryptionMethod2.encrypted_verify_certificate + ) @dataclass class Oracle: - destination: str host: str port: int sid: str username: str + destination: str = 'airbyte-destination-oracle' encryption: Optional[ Union[Unencrypted1, NativeNetworkEncryptionNNE, TLSEncryptedVerifyCertificate] ] = None @@ -1720,8 +1722,8 @@ class Mode28(Enum): @dataclass class OpenAI3: - mode: Mode28 openai_key: str + mode: Mode28 = Mode28.openai class Mode29(Enum): @@ -1731,7 +1733,7 @@ class Mode29(Enum): @dataclass class Cohere2: cohere_key: str - mode: Mode29 + mode: Mode29 = Mode29.cohere class Mode30(Enum): @@ -1740,7 +1742,7 @@ class Mode30(Enum): @dataclass class Fake3: - mode: Mode30 + mode: Mode30 = Mode30.fake class Mode31(Enum): @@ -1751,8 +1753,8 @@ class Mode31(Enum): class AzureOpenAI2: api_base: str deployment: str - mode: Mode31 openai_key: str + mode: Mode31 = Mode31.azure_openai class Mode32(Enum): @@ -1763,8 +1765,8 @@ class Mode32(Enum): class OpenAICompatible2: base_url: str dimensions: int - mode: Mode32 api_key: Optional[str] = '' + mode: Mode32 = Mode32.openai_compatible model_name: Optional[str] = 'text-embedding-ada-002' @@ -1781,8 +1783,8 @@ class Mode33(Enum): @dataclass class BySeparator2: - mode: Mode33 keep_separator: Optional[bool] = False + mode: Mode33 = Mode33.separator separators: Optional[List[str]] = field( default_factory=lambda: ['"\\n\\n"', '"\\n"', '" "', '""'] ) @@ -1794,7 +1796,7 @@ class Mode34(Enum): @dataclass class ByMarkdownHeader2: - mode: Mode34 + mode: Mode34 = Mode34.markdown split_level: Optional[int] = 1 @@ -1805,7 +1807,7 @@ class Mode35(Enum): @dataclass class ByProgrammingLanguage2: language: Language - mode: Mode35 + mode: Mode35 = Mode35.code @dataclass @@ -1824,10 +1826,10 @@ class ProcessingConfigModel3: @dataclass class Pinecone1: - destination: str embedding: Union[OpenAI3, Cohere2, Fake3, AzureOpenAI2, OpenAICompatible2] indexing: Indexing2 processing: ProcessingConfigModel3 + destination: str = 'airbyte-destination-pinecone' omit_raw_text: Optional[bool] = False @@ -1837,7 +1839,7 @@ class Mode36(Enum): @dataclass class Disable: - mode: Mode36 + mode: Mode36 = Mode36.disable class Mode37(Enum): @@ -1846,7 +1848,7 @@ class Mode37(Enum): @dataclass class Allow: - mode: Mode37 + mode: Mode37 = Mode37.allow class Mode38(Enum): @@ -1855,7 +1857,7 @@ class Mode38(Enum): @dataclass class Prefer: - mode: Mode38 + mode: Mode38 = Mode38.prefer class Mode39(Enum): @@ -1864,7 +1866,7 @@ class Mode39(Enum): @dataclass class Require: - mode: Mode39 + mode: Mode39 = Mode39.require class Mode40(Enum): @@ -1874,8 +1876,8 @@ class Mode40(Enum): @dataclass class VerifyCa: ca_certificate: str - mode: Mode40 client_key_password: Optional[str] = None + mode: Mode40 = Mode40.verify_ca class Mode41(Enum): @@ -1887,24 +1889,24 @@ class VerifyFull: ca_certificate: str client_certificate: str client_key: str - mode: Mode41 client_key_password: Optional[str] = None + mode: Mode41 = Mode41.verify_full @dataclass class Postgres: database: str - destination: str host: str port: int schema_: str username: str + destination: str = 'airbyte-destination-postgres' jdbc_url_params: Optional[str] = None password: Optional[str] = None ssl: Optional[bool] = False - ssl_mode: Optional[ - Union[Disable, Allow, Prefer, Require, VerifyCa, VerifyFull] - ] = None + ssl_mode: Optional[Union[Disable, Allow, Prefer, Require, VerifyCa, VerifyFull]] = ( + None + ) tunnel_method: Optional[ Union[NoTunnel, 
SSHKeyAuthentication, PasswordAuthentication] ] = None @@ -1914,13 +1916,13 @@ class Postgres: class Pubsub: batching_enabled: bool credentials_json: str - destination: str ordering_enabled: bool project_id: str topic_id: str batching_delay_threshold: Optional[int] = 1 batching_element_count_threshold: Optional[int] = 1 batching_request_bytes_threshold: Optional[int] = 1 + destination: str = 'airbyte-destination-pubsub' class CompressionType5(Enum): @@ -1944,7 +1946,6 @@ class Pulsar: block_if_queue_full: bool brokers: str compression_type: CompressionType5 - destination: str max_pending_messages: int max_pending_messages_across_partitions: int send_timeout_ms: int @@ -1953,6 +1954,7 @@ class Pulsar: topic_tenant: str topic_type: TopicType use_tls: bool + destination: str = 'airbyte-destination-pulsar' producer_name: Optional[str] = None producer_sync: Optional[bool] = False topic_test: Optional[str] = None @@ -1964,8 +1966,8 @@ class Mode42(Enum): @dataclass class OpenAI4: - mode: Mode42 openai_key: str + mode: Mode42 = Mode42.openai class Mode43(Enum): @@ -1975,7 +1977,7 @@ class Mode43(Enum): @dataclass class Cohere3: cohere_key: str - mode: Mode43 + mode: Mode43 = Mode43.cohere class Mode44(Enum): @@ -1984,7 +1986,7 @@ class Mode44(Enum): @dataclass class Fake4: - mode: Mode44 + mode: Mode44 = Mode44.fake class Mode45(Enum): @@ -1995,8 +1997,8 @@ class Mode45(Enum): class AzureOpenAI3: api_base: str deployment: str - mode: Mode45 openai_key: str + mode: Mode45 = Mode45.azure_openai class Mode46(Enum): @@ -2007,8 +2009,8 @@ class Mode46(Enum): class OpenAICompatible3: base_url: str dimensions: int - mode: Mode46 api_key: Optional[str] = '' + mode: Mode46 = Mode46.openai_compatible model_name: Optional[str] = 'text-embedding-ada-002' @@ -2019,7 +2021,7 @@ class Mode47(Enum): @dataclass class ApiKeyAuth: api_key: str - mode: Optional[Mode47] = Mode47.api_key_auth + mode: Mode47 = Mode47.api_key_auth class Mode48(Enum): @@ -2028,7 +2030,7 @@ class Mode48(Enum): @dataclass class NoAuth1: - mode: Optional[Mode48] = Mode48.no_auth + mode: Mode48 = Mode48.no_auth class DistanceMetric(Enum): @@ -2053,8 +2055,8 @@ class Mode49(Enum): @dataclass class BySeparator3: - mode: Mode49 keep_separator: Optional[bool] = False + mode: Mode49 = Mode49.separator separators: Optional[List[str]] = field( default_factory=lambda: ['"\\n\\n"', '"\\n"', '" "', '""'] ) @@ -2066,7 +2068,7 @@ class Mode50(Enum): @dataclass class ByMarkdownHeader3: - mode: Mode50 + mode: Mode50 = Mode50.markdown split_level: Optional[int] = 1 @@ -2077,7 +2079,7 @@ class Mode51(Enum): @dataclass class ByProgrammingLanguage3: language: Language - mode: Mode51 + mode: Mode51 = Mode51.code @dataclass @@ -2096,10 +2098,10 @@ class ProcessingConfigModel4: @dataclass class Qdrant: - destination: str embedding: Union[OpenAI4, Cohere3, Fake4, AzureOpenAI3, OpenAICompatible3] indexing: Indexing3 processing: ProcessingConfigModel4 + destination: str = 'airbyte-destination-qdrant' omit_raw_text: Optional[bool] = False @@ -2232,22 +2234,22 @@ class JSONLinesNewlineDelimitedJSON3: class R2: access_key_id: str account_id: str - destination: str format: Union[ AvroApacheAvro1, CSVCommaSeparatedValues2, JSONLinesNewlineDelimitedJSON3 ] s3_bucket_name: str s3_bucket_path: str secret_access_key: str + destination: str = 'airbyte-destination-r2' file_name_pattern: Optional[str] = None s3_path_format: Optional[str] = None @dataclass class Rabbitmq: - destination: str host: str routing_key: str + destination: str = 'airbyte-destination-rabbitmq' exchange: 
Optional[str] = None password: Optional[str] = None port: Optional[int] = None @@ -2266,7 +2268,7 @@ class Mode52(Enum): @dataclass class Disable1: - mode: Mode52 + mode: Mode52 = Mode52.disable class Mode53(Enum): @@ -2278,17 +2280,17 @@ class VerifyFull1: ca_certificate: str client_certificate: str client_key: str - mode: Mode53 client_key_password: Optional[str] = None + mode: Mode53 = Mode53.verify_full @dataclass class Redis: cache_type: CacheType - destination: str host: str port: int username: str + destination: str = 'airbyte-destination-redis' password: Optional[str] = None ssl: Optional[bool] = False ssl_mode: Optional[Union[Disable1, VerifyFull1]] = None @@ -2311,8 +2313,8 @@ class Redpanda: bootstrap_servers: str buffer_memory: str compression_type: CompressionType10 - destination: str retries: int + destination: str = 'airbyte-destination-redpanda' socket_connection_setup_timeout_max_ms: Optional[int] = None socket_connection_setup_timeout_ms: Optional[int] = None topic_num_partitions: Optional[int] = None @@ -2325,7 +2327,7 @@ class EncryptionType(Enum): @dataclass class NoEncryption: - encryption_type: EncryptionType + encryption_type: EncryptionType = EncryptionType.none class EncryptionType1(Enum): @@ -2334,7 +2336,7 @@ class EncryptionType1(Enum): @dataclass class AESCBCEnvelopeEncryption: - encryption_type: EncryptionType1 + encryption_type: EncryptionType1 = EncryptionType1.aes_cbc_envelope key_encrypting_key: Optional[str] = None @@ -2368,7 +2370,6 @@ class S3BucketRegion3(Enum): @dataclass class AWSS3Staging: access_key_id: str - method: str s3_bucket_name: str s3_bucket_region: S3BucketRegion3 secret_access_key: str @@ -2377,24 +2378,25 @@ class AWSS3Staging: ) file_buffer_count: Optional[int] = 10 file_name_pattern: Optional[str] = None + method: str = 'S3 Staging' purge_staging_data: Optional[bool] = True s3_bucket_path: Optional[str] = None @dataclass class Standard: - method: str + method: str = 'Standard' @dataclass class Redshift: database: str - destination: str host: str password: str port: int schema_: str username: str + destination: str = 'airbyte-destination-redshift' jdbc_url_params: Optional[str] = None raw_data_schema: Optional[str] = None tunnel_method: Optional[ @@ -2407,9 +2409,9 @@ class Redshift: @dataclass class Rockset: api_key: str - destination: str workspace: str api_server: Optional[str] = 'https://api.rs2.usw2.rockset.com' + destination: str = 'airbyte-destination-rockset' class CompressionType11(Enum): @@ -2478,7 +2480,6 @@ class S3BucketRegion4(Enum): @dataclass class S3glue: - destination: str format: JSONLinesNewlineDelimitedJSON4 glue_database: str glue_serialization_library: SerializationLibrary @@ -2486,6 +2487,7 @@ class S3glue: s3_bucket_path: str s3_bucket_region: S3BucketRegion4 access_key_id: Optional[str] = None + destination: str = 'airbyte-destination-s3-glue' file_name_pattern: Optional[str] = None s3_endpoint: Optional[str] = '' s3_path_format: Optional[str] = None @@ -2635,7 +2637,6 @@ class ParquetColumnarStorage2: @dataclass class S31: - destination: str format: Union[ AvroApacheAvro2, CSVCommaSeparatedValues3, @@ -2646,6 +2647,7 @@ class S31: s3_bucket_path: str s3_bucket_region: S3BucketRegion4 access_key_id: Optional[str] = None + destination: str = 'airbyte-destination-s3' file_name_pattern: Optional[str] = None s3_endpoint: Optional[str] = '' s3_path_format: Optional[str] = None @@ -2655,11 +2657,11 @@ class S31: @dataclass class Scylla: address: str - destination: str keyspace: str password: str port: int username: 
str + destination: str = 'airbyte-destination-scylla' replication: Optional[int] = 1 @@ -2667,20 +2669,20 @@ class Scylla: class Selectdb: cluster_name: str database: str - destination: str jdbc_url: str load_url: str password: str user_name: str + destination: str = 'airbyte-destination-selectdb' @dataclass class Sftpjson: - destination: str destination_path: str host: str password: str username: str + destination: str = 'airbyte-destination-sftp-json' port: Optional[int] = 22 @@ -2691,7 +2693,7 @@ class AuthType(Enum): @dataclass class KeyPairAuthentication: private_key: str - auth_type: Optional[AuthType] = AuthType.Key_Pair_Authentication + auth_type: AuthType = AuthType.Key_Pair_Authentication private_key_password: Optional[str] = None @@ -2702,13 +2704,12 @@ class AuthType1(Enum): @dataclass class UsernameAndPassword: password: str - auth_type: Optional[AuthType1] = AuthType1.Username_and_Password + auth_type: AuthType1 = AuthType1.Username_and_Password @dataclass class Snowflake: database: str - destination: str host: str role: str schema_: str @@ -2717,6 +2718,7 @@ class Snowflake: credentials: Optional[ Union[OAuth2.Field0, KeyPairAuthentication, UsernameAndPassword] ] = None + destination: str = 'airbyte-destination-snowflake' disable_type_dedupe: Optional[bool] = False jdbc_url_params: Optional[str] = None raw_data_schema: Optional[str] = None @@ -2724,8 +2726,8 @@ class Snowflake: @dataclass class Sqlite: - destination: str destination_path: str + destination: str = 'airbyte-destination-sqlite' class ObjectStoreType(Enum): @@ -2761,12 +2763,12 @@ class AmazonS31: class Starburstgalaxy: accept_terms: bool catalog: str - destination: str password: str server_hostname: str staging_object_store: AmazonS31 username: str catalog_schema: Optional[str] = 'public' + destination: str = 'airbyte-destination-starburst-galaxy' port: Optional[str] = '443' purge_staging_table: Optional[bool] = True @@ -2777,7 +2779,7 @@ class Mode54(Enum): @dataclass class Disable2: - mode: Mode54 + mode: Mode54 = Mode54.disable class Mode55(Enum): @@ -2786,7 +2788,7 @@ class Mode55(Enum): @dataclass class Allow1: - mode: Mode55 + mode: Mode55 = Mode55.allow class Mode56(Enum): @@ -2795,7 +2797,7 @@ class Mode56(Enum): @dataclass class Prefer1: - mode: Mode56 + mode: Mode56 = Mode56.prefer class Mode57(Enum): @@ -2804,7 +2806,7 @@ class Mode57(Enum): @dataclass class Require1: - mode: Mode57 + mode: Mode57 = Mode57.require class Mode58(Enum): @@ -2813,8 +2815,8 @@ class Mode58(Enum): @dataclass class VerifyCa1: - mode: Mode58 ssl_ca_certificate: str + mode: Mode58 = Mode58.verify_ca class Mode59(Enum): @@ -2823,15 +2825,15 @@ class Mode59(Enum): @dataclass class VerifyFull2: - mode: Mode59 ssl_ca_certificate: str + mode: Mode59 = Mode59.verify_full @dataclass class Teradata: - destination: str host: str username: str + destination: str = 'airbyte-destination-teradata' jdbc_url_params: Optional[str] = None password: Optional[str] = None schema_: Optional[str] = 'airbyte_td' @@ -2844,10 +2846,10 @@ class Teradata: @dataclass class Tidb: database: str - destination: str host: str port: int username: str + destination: str = 'airbyte-destination-tidb' jdbc_url_params: Optional[str] = None password: Optional[str] = '' ssl: Optional[bool] = False @@ -2859,16 +2861,16 @@ class Tidb: @dataclass class Timeplus: apikey: str - destination: str endpoint: str + destination: str = 'airbyte-destination-timeplus' @dataclass class Typesense: api_key: str - destination: str host: str batch_size: Optional[int] = None + 
destination: str = 'airbyte-destination-typesense' port: Optional[str] = None protocol: Optional[str] = None @@ -2876,11 +2878,11 @@ class Typesense: @dataclass class Vertica: database: str - destination: str host: str port: int schema_: str username: str + destination: str = 'airbyte-destination-vertica' jdbc_url_params: Optional[str] = None password: Optional[str] = None tunnel_method: Optional[ @@ -2894,7 +2896,7 @@ class Mode60(Enum): @dataclass class NoExternalEmbedding: - mode: Mode60 + mode: Mode60 = Mode60.no_embedding class Mode61(Enum): @@ -2905,8 +2907,8 @@ class Mode61(Enum): class AzureOpenAI4: api_base: str deployment: str - mode: Mode61 openai_key: str + mode: Mode61 = Mode61.azure_openai class Mode62(Enum): @@ -2915,8 +2917,8 @@ class Mode62(Enum): @dataclass class OpenAI5: - mode: Mode62 openai_key: str + mode: Mode62 = Mode62.openai class Mode63(Enum): @@ -2926,7 +2928,7 @@ class Mode63(Enum): @dataclass class Cohere4: cohere_key: str - mode: Mode63 + mode: Mode63 = Mode63.cohere class Mode64(Enum): @@ -2937,7 +2939,7 @@ class Mode64(Enum): class FromField1: dimensions: int field_name: str - mode: Mode64 + mode: Mode64 = Mode64.from_field class Mode65(Enum): @@ -2946,7 +2948,7 @@ class Mode65(Enum): @dataclass class Fake5: - mode: Mode65 + mode: Mode65 = Mode65.fake class Mode66(Enum): @@ -2957,8 +2959,8 @@ class Mode66(Enum): class OpenAICompatible4: base_url: str dimensions: int - mode: Mode66 api_key: Optional[str] = '' + mode: Mode66 = Mode66.openai_compatible model_name: Optional[str] = 'text-embedding-ada-002' @@ -2974,8 +2976,8 @@ class Mode67(Enum): @dataclass class APIToken1: - mode: Mode67 token: str + mode: Mode67 = Mode67.token class Mode68(Enum): @@ -2984,9 +2986,9 @@ class Mode68(Enum): @dataclass class UsernamePassword2: - mode: Mode68 password: str username: str + mode: Mode68 = Mode68.username_password class Mode69(Enum): @@ -2995,7 +2997,7 @@ class Mode69(Enum): @dataclass class NoAuthentication: - mode: Mode69 + mode: Mode69 = Mode69.no_auth class DefaultVectorizer(Enum): @@ -3025,8 +3027,8 @@ class Mode70(Enum): @dataclass class BySeparator4: - mode: Mode70 keep_separator: Optional[bool] = False + mode: Mode70 = Mode70.separator separators: Optional[List[str]] = field( default_factory=lambda: ['"\\n\\n"', '"\\n"', '" "', '""'] ) @@ -3038,7 +3040,7 @@ class Mode71(Enum): @dataclass class ByMarkdownHeader4: - mode: Mode71 + mode: Mode71 = Mode71.markdown split_level: Optional[int] = 1 @@ -3049,7 +3051,7 @@ class Mode72(Enum): @dataclass class ByProgrammingLanguage4: language: Language - mode: Mode72 + mode: Mode72 = Mode72.code @dataclass @@ -3068,7 +3070,6 @@ class ProcessingConfigModel5: @dataclass class Weaviate: - destination: str embedding: Union[ NoExternalEmbedding, AzureOpenAI4, @@ -3080,6 +3081,7 @@ class Weaviate: ] indexing: Indexing4 processing: ProcessingConfigModel5 + destination: str = 'airbyte-destination-weaviate' omit_raw_text: Optional[bool] = False @@ -3087,26 +3089,26 @@ class Weaviate: class Xata: api_key: str db_url: str - destination: str + destination: str = 'airbyte-destination-xata' @dataclass class Yugabytedb: database: str - destination: str host: str port: int schema_: str username: str + destination: str = 'airbyte-destination-yugabytedb' jdbc_url_params: Optional[str] = None password: Optional[str] = None @dataclass class Airbytedevmatecloud: - destination: str privateKey: str streamId: str + destination: str = 'airbyte-devmate-cloud' Destination = Union[ diff --git a/instill/resources/schema/archetypeai.py 
b/instill/resources/schema/archetypeai.py new file mode 100644 index 0000000..6db7199 --- /dev/null +++ b/instill/resources/schema/archetypeai.py @@ -0,0 +1,11 @@ +# generated by datamodel-codegen: +# filename: archetypeai_definitions.json + +from __future__ import annotations + +from dataclasses import dataclass + + +@dataclass +class ArchetypeAIConnectorSpecification: + api_key: str diff --git a/instill/resources/schema/archetypeai_task_describe_input.py b/instill/resources/schema/archetypeai_task_describe_input.py new file mode 100644 index 0000000..cd3190d --- /dev/null +++ b/instill/resources/schema/archetypeai_task_describe_input.py @@ -0,0 +1,13 @@ +# generated by datamodel-codegen: +# filename: archetypeai_task_describe_input.json + +from __future__ import annotations + +from dataclasses import dataclass +from typing import List + + +@dataclass +class Input: + query: str + file_ids: List[str] diff --git a/instill/resources/schema/archetypeai_task_describe_output.py b/instill/resources/schema/archetypeai_task_describe_output.py new file mode 100644 index 0000000..093ea86 --- /dev/null +++ b/instill/resources/schema/archetypeai_task_describe_output.py @@ -0,0 +1,19 @@ +# generated by datamodel-codegen: +# filename: archetypeai_task_describe_output.json + +from __future__ import annotations + +from dataclasses import dataclass +from typing import List + + +@dataclass +class FrameDescription: + frame_id: int + timestamp: float + description: str + + +@dataclass +class Output: + descriptions: List[FrameDescription] diff --git a/instill/resources/schema/archetypeai_task_summarize_input.py b/instill/resources/schema/archetypeai_task_summarize_input.py new file mode 100644 index 0000000..acea662 --- /dev/null +++ b/instill/resources/schema/archetypeai_task_summarize_input.py @@ -0,0 +1,13 @@ +# generated by datamodel-codegen: +# filename: archetypeai_task_summarize_input.json + +from __future__ import annotations + +from dataclasses import dataclass +from typing import List + + +@dataclass +class Input: + query: str + file_ids: List[str] diff --git a/instill/resources/schema/archetypeai_task_summarize_output.py b/instill/resources/schema/archetypeai_task_summarize_output.py new file mode 100644 index 0000000..a1cb91d --- /dev/null +++ b/instill/resources/schema/archetypeai_task_summarize_output.py @@ -0,0 +1,11 @@ +# generated by datamodel-codegen: +# filename: archetypeai_task_summarize_output.json + +from __future__ import annotations + +from dataclasses import dataclass + + +@dataclass +class Output: + response: str diff --git a/instill/resources/schema/archetypeai_task_upload_file_input.py b/instill/resources/schema/archetypeai_task_upload_file_input.py new file mode 100644 index 0000000..cfe8b5a --- /dev/null +++ b/instill/resources/schema/archetypeai_task_upload_file_input.py @@ -0,0 +1,11 @@ +# generated by datamodel-codegen: +# filename: archetypeai_task_upload_file_input.json + +from __future__ import annotations + +from dataclasses import dataclass + + +@dataclass +class Input: + file: str diff --git a/instill/resources/schema/archetypeai_task_upload_file_output.py b/instill/resources/schema/archetypeai_task_upload_file_output.py new file mode 100644 index 0000000..2dd1e33 --- /dev/null +++ b/instill/resources/schema/archetypeai_task_upload_file_output.py @@ -0,0 +1,11 @@ +# generated by datamodel-codegen: +# filename: archetypeai_task_upload_file_output.json + +from __future__ import annotations + +from dataclasses import dataclass + + +@dataclass +class Output: + file_id: str 
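For context, a minimal sketch (not part of the diff) of how the generated Archetype AI task dataclasses above might be wired together: a file is first uploaded via TASK_UPLOAD_FILE, and the file ID it returns is then referenced by TASK_DESCRIBE. The module paths follow the new files introduced in this change; the file path and the file ID value below are hypothetical placeholders, not values produced by the SDK.

    # Sketch only, assuming the instill SDK with these generated modules is installed.
    from instill.resources.schema import archetypeai_task_describe_input as describe_in
    from instill.resources.schema import archetypeai_task_upload_file_input as upload_in

    # Upload a video first; the connector's TASK_UPLOAD_FILE output carries a file_id.
    upload_input = upload_in.Input(file="my_video.mp4")  # hypothetical local file reference

    # Pass the returned ID (hypothetical value here) to the describe task.
    describe_input = describe_in.Input(
        query="Describe the main activity in the video",
        file_ids=["hypothetical-file-id"],
    )

Both dataclasses only validate shape at construction time; serialization into the component config happens later via the helper utilities shown in the next file.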
diff --git a/instill/resources/schema/helper.py b/instill/resources/schema/helper.py index 9896a4c..bcc475f 100644 --- a/instill/resources/schema/helper.py +++ b/instill/resources/schema/helper.py @@ -1,6 +1,13 @@ +# pylint: disable=no-member,wrong-import-position,no-name-in-module import re from dataclasses import fields, is_dataclass +from instill.protogen.vdp.pipeline.v1beta.pipeline_pb2 import ( + ConnectorComponent, + IteratorComponent, + OperatorComponent, +) + def populate_default_value(dc): for field in fields(dc): @@ -37,19 +44,37 @@ def pop_default_and_to_dict(dc) -> dict: return output_dict -def construct_component_config(inp): +def construct_component_config( + component_type: str, + definition_name: str, + inp, +): task_name = str(inp.__class__).split(".")[3] prefix = task_name.split("_")[0] + "_" suffix = "_" + task_name.split("_")[-1] - config = { - "input": pop_default_and_to_dict(inp), - "task": remove_prefix_and_suffix( - task_name, - prefix, - suffix, - ).upper(), - } - return config + inp = pop_default_and_to_dict(inp) + task = remove_prefix_and_suffix( + task_name, + prefix, + suffix, + ).upper() + + if component_type == "connector": + component = ConnectorComponent( + definition_name=definition_name, + task=task, + input=inp, + ) + elif component_type == "operator": + component = OperatorComponent( + definition_name=definition_name, + task=task, + input=inp, + ) + elif component_type == "iterator": + component = IteratorComponent() + + return component def remove_prefix(text: str, prefix: str) -> str: diff --git a/instill/resources/schema/huggingface_task_conversational_input.py b/instill/resources/schema/huggingface_task_conversational_input.py index f3ddc69..0145bab 100644 --- a/instill/resources/schema/huggingface_task_conversational_input.py +++ b/instill/resources/schema/huggingface_task_conversational_input.py @@ -26,7 +26,7 @@ class Parameters: max_time: Optional[float] = None min_length: Optional[int] = None repetition_penalty: Optional[float] = None - temperature: Optional[float] = 1.0 + temperature: Optional[float] = 1 top_k: Optional[int] = None top_p: Optional[float] = None diff --git a/instill/resources/schema/huggingface_task_summarization_input.py b/instill/resources/schema/huggingface_task_summarization_input.py index f80a077..5911384 100644 --- a/instill/resources/schema/huggingface_task_summarization_input.py +++ b/instill/resources/schema/huggingface_task_summarization_input.py @@ -19,7 +19,7 @@ class Parameters: max_time: Optional[float] = None min_length: Optional[int] = None repetition_penalty: Optional[float] = None - temperature: Optional[float] = 1.0 + temperature: Optional[float] = 1 top_k: Optional[int] = None top_p: Optional[float] = None diff --git a/instill/resources/schema/instill.py b/instill/resources/schema/instill.py index 74d4fc0..3a128f8 100644 --- a/instill/resources/schema/instill.py +++ b/instill/resources/schema/instill.py @@ -4,19 +4,19 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Any, Optional, Union +from typing import Union @dataclass class InstillModelConnectorInternalMode: - mode: Optional[Any] = None + mode: str = 'Internal Mode' @dataclass class InstillModelConnectorExternalMode: api_token: str server_url: str - mode: Optional[Any] = None + mode: str = 'External Mode' InstillModelConnector = Union[ diff --git a/instill/resources/schema/jsons/archetypeai_definitions.json b/instill/resources/schema/jsons/archetypeai_definitions.json new file mode 100644 index 0000000..75e3de8 
--- /dev/null +++ b/instill/resources/schema/jsons/archetypeai_definitions.json @@ -0,0 +1 @@ +{ "$schema": "http://json-schema.org/draft-07/schema#", "additionalProperties": false, "properties": { "api_key": { "description": "Fill your Archetype AI API key", "instillCredentialField": true, "instillUIOrder": 0, "title": "API Key", "type": "string" } }, "required": [ "api_key" ], "title": "Archetype AI Connector Specification", "type": "object" } diff --git a/instill/resources/schema/jsons/archetypeai_task_describe_input.json b/instill/resources/schema/jsons/archetypeai_task_describe_input.json new file mode 100644 index 0000000..c8edbf6 --- /dev/null +++ b/instill/resources/schema/jsons/archetypeai_task_describe_input.json @@ -0,0 +1 @@ +{"instillUIOrder":0,"properties":{"query":{"description":"A guide to describe the video","instillAcceptFormats":["string"],"instillUIMultiline":true,"instillUIOrder":0,"instillUpstreamTypes":["value","reference","template"],"title":"Query","type":"string"},"file_ids":{"description":"The IDs of the videos to describe. These must have been previously uploaded via TASK_UPLOAD_FILE.","instillAcceptFormats":["array:string"],"instillUIOrder":1,"instillUpstreamTypes":["value","reference"],"items":{"instillUIMultiline":false,"type":"string"},"minItems":1,"title":"File IDs","type":"array"}},"required":["query","file_ids"],"title":"Input","type":"object"} diff --git a/instill/resources/schema/jsons/archetypeai_task_describe_output.json b/instill/resources/schema/jsons/archetypeai_task_describe_output.json new file mode 100644 index 0000000..a0c51a5 --- /dev/null +++ b/instill/resources/schema/jsons/archetypeai_task_describe_output.json @@ -0,0 +1 @@ +{"instillUIOrder":0,"properties":{"descriptions":{"description":"A set of descriptions corresponding to different moments in the video","instillUIOrder":0,"title":"Descriptions","type":"array","items":{"title":"Frame description","type":"object","properties":{"frame_id":{"description":"The frame number in the video that is being described","instillFormat":"integer","instillUIOrder":3,"required":[],"title":"Frame ID","type":"integer"},"timestamp":{"description":"The moment of the video (in seconds since the start) that is being described","instillFormat":"number","instillUIOrder":1,"title":"Timestamp","type":"number"},"description":{"description":"The description of the frame","instillFormat":"string","instillUIOrder":2,"title":"Description","type":"string"}},"required":["description","timestamp","frame_id"]}}},"required":["descriptions"],"title":"Output","type":"object"} diff --git a/instill/resources/schema/jsons/archetypeai_task_summarize_input.json b/instill/resources/schema/jsons/archetypeai_task_summarize_input.json new file mode 100644 index 0000000..71f7b7c --- /dev/null +++ b/instill/resources/schema/jsons/archetypeai_task_summarize_input.json @@ -0,0 +1 @@ +{"instillUIOrder":0,"properties":{"query":{"description":"A guide to summarize the image","instillAcceptFormats":["string"],"instillUIMultiline":true,"instillUIOrder":0,"instillUpstreamTypes":["value","reference","template"],"title":"Query","type":"string"},"file_ids":{"description":"The IDs of the images to summarize. 
These must have been previously uploaded via TASK_UPLOAD_FILE.","instillAcceptFormats":["array:string"],"instillUIOrder":1,"instillUpstreamTypes":["value","reference"],"items":{"instillUIMultiline":false,"type":"string"},"minItems":1,"title":"File IDs","type":"array"}},"required":["query","file_ids"],"title":"Input","type":"object"} diff --git a/instill/resources/schema/jsons/archetypeai_task_summarize_output.json b/instill/resources/schema/jsons/archetypeai_task_summarize_output.json new file mode 100644 index 0000000..37be6b8 --- /dev/null +++ b/instill/resources/schema/jsons/archetypeai_task_summarize_output.json @@ -0,0 +1 @@ +{"instillUIOrder":0,"properties":{"response":{"description":"A text responding to the query","instillFormat":"string","instillUIOrder":0,"title":"Response","type":"string"}},"required":["response"],"title":"Output","type":"object"} diff --git a/instill/resources/schema/jsons/archetypeai_task_upload_file_input.json b/instill/resources/schema/jsons/archetypeai_task_upload_file_input.json new file mode 100644 index 0000000..604444b --- /dev/null +++ b/instill/resources/schema/jsons/archetypeai_task_upload_file_input.json @@ -0,0 +1 @@ +{"instillUIOrder":0,"properties":{"file":{"title":"File","description":"The file to upload. Accepted formats are JPEG and PNG for images or MP4 for videos","type":"string","instillAcceptFormats":["video/*","image/*"],"instillUIOrder":0,"instillUpstreamTypes":["reference"]}},"required":["file"],"title":"Input","type":"object"} diff --git a/instill/resources/schema/jsons/archetypeai_task_upload_file_output.json b/instill/resources/schema/jsons/archetypeai_task_upload_file_output.json new file mode 100644 index 0000000..885d13c --- /dev/null +++ b/instill/resources/schema/jsons/archetypeai_task_upload_file_output.json @@ -0,0 +1 @@ +{"instillUIOrder":0,"properties":{"file_id":{"instillFormat":"string","instillUIOrder":0,"title":"File ID","description":"The ID to reference the file in queries","type":"string"}},"required":["file_id"],"title":"Output","type":"object"} diff --git a/instill/resources/schema/jsons/archetypeai_tasks.json b/instill/resources/schema/jsons/archetypeai_tasks.json new file mode 100644 index 0000000..a2ae3e5 --- /dev/null +++ b/instill/resources/schema/jsons/archetypeai_tasks.json @@ -0,0 +1 @@ +{"TASK_DESCRIBE": {"instillShortDescription": "Describe a video.", "input": {"instillUIOrder": 0, "properties": {"query": {"description": "A guide to describe the video", "instillAcceptFormats": ["string"], "instillUIMultiline": true, "instillUIOrder": 0, "instillUpstreamTypes": ["value", "reference", "template"], "title": "Query", "type": "string"}, "file_ids": {"description": "The IDs of the videos to describe. 
These must have been previously uploaded via TASK_UPLOAD_FILE.", "instillAcceptFormats": ["array:string"], "instillUIOrder": 1, "instillUpstreamTypes": ["value", "reference"], "items": {"instillUIMultiline": false, "type": "string"}, "minItems": 1, "title": "File IDs", "type": "array"}}, "required": ["query", "file_ids"], "title": "Input", "type": "object"}, "output": {"instillUIOrder": 0, "properties": {"descriptions": {"description": "A set of descriptions corresponding to different moments in the video", "instillUIOrder": 0, "title": "Descriptions", "type": "array", "items": {"title": "Frame description", "type": "object", "properties": {"frame_id": {"description": "The frame number in the video that is being described", "instillFormat": "integer", "instillUIOrder": 3, "required": [], "title": "Frame ID", "type": "integer"}, "timestamp": {"description": "The moment of the video (in seconds since the start) that is being described", "instillFormat": "number", "instillUIOrder": 1, "title": "Timestamp", "type": "number"}, "description": {"description": "The description of the frame", "instillFormat": "string", "instillUIOrder": 2, "title": "Description", "type": "string"}}, "required": ["description", "timestamp", "frame_id"]}}}, "required": ["descriptions"], "title": "Output", "type": "object"}}, "TASK_SUMMARIZE": {"instillShortDescription": "Summarize the image.", "input": {"instillUIOrder": 0, "properties": {"query": {"description": "A guide to summarize the image", "instillAcceptFormats": ["string"], "instillUIMultiline": true, "instillUIOrder": 0, "instillUpstreamTypes": ["value", "reference", "template"], "title": "Query", "type": "string"}, "file_ids": {"description": "The IDs of the images to summarize. These must have been previously uploaded via TASK_UPLOAD_FILE.", "instillAcceptFormats": ["array:string"], "instillUIOrder": 1, "instillUpstreamTypes": ["value", "reference"], "items": {"instillUIMultiline": false, "type": "string"}, "minItems": 1, "title": "File IDs", "type": "array"}}, "required": ["query", "file_ids"], "title": "Input", "type": "object"}, "output": {"instillUIOrder": 0, "properties": {"response": {"description": "A text responding to the query", "instillFormat": "string", "instillUIOrder": 0, "title": "Response", "type": "string"}}, "required": ["response"], "title": "Output", "type": "object"}}, "TASK_UPLOAD_FILE": {"instillShortDescription": "Upload file.", "input": {"instillUIOrder": 0, "properties": {"file": {"title": "File", "description": "The file to upload. 
Accepted formats are JPEG and PNG for images or MP4 for videos", "type": "string", "instillAcceptFormats": ["video/*", "image/*"], "instillUIOrder": 0, "instillUpstreamTypes": ["reference"]}}, "required": ["file"], "title": "Input", "type": "object"}, "output": {"instillUIOrder": 0, "properties": {"file_id": {"instillFormat": "string", "instillUIOrder": 0, "title": "File ID", "description": "The ID to reference the file in queries", "type": "string"}}, "required": ["file_id"], "title": "Output", "type": "object"}}} \ No newline at end of file diff --git a/instill/resources/schema/jsons/huggingface_task_conversational_input.json b/instill/resources/schema/jsons/huggingface_task_conversational_input.json index d77c243..f4a8c21 100644 --- a/instill/resources/schema/jsons/huggingface_task_conversational_input.json +++ b/instill/resources/schema/jsons/huggingface_task_conversational_input.json @@ -1 +1 @@ -{"instillUIOrder":0,"properties":{"inputs":{"instillUIOrder":1,"properties":{"generated_responses":{"description":"A list of strings corresponding to the earlier replies from the model.","instillAcceptFormats":["array:string"],"instillUIOrder":0,"instillUpstreamTypes":["value","reference"],"items":{"instillUIMultiline":true,"type":"string"},"title":"Generated Responses","type":"array"},"past_user_inputs":{"description":"A list of strings corresponding to the earlier replies from the user. Should be of the same length of generated_responses.","instillAcceptFormats":["array:string"],"instillShortDescription":"A list of strings corresponding to the earlier replies from the user.","instillUIOrder":1,"instillUpstreamTypes":["value","reference"],"items":{"instillUIMultiline":true,"type":"string"},"title":"Past User Inputs","type":"array"},"text":{"description":"The last input from the user in the conversation.","instillAcceptFormats":["string"],"instillUIMultiline":true,"instillUIOrder":2,"instillUpstreamTypes":["value","reference","template"],"title":"Text","type":"string"}},"required":["text"],"title":"Inputs","type":"object"},"model":{"description":"The Hugging Face model to be used","instillAcceptFormats":["string"],"instillUpstreamTypes":["value","reference","template"],"title":"Model","type":"string"},"options":{"properties":{"use_cache":{"description":"There is a cache layer on the inference API to speedup requests we have already seen. Most models can use those results as is as models are deterministic (meaning the results will be the same anyway). However if you use a non deterministic model, you can set this parameter to prevent the caching mechanism from being used resulting in a real new query.","instillAcceptFormats":["boolean"],"instillShortDescription":"Enable the cache of inference API","instillUIOrder":1,"instillUpstreamTypes":["value","reference"],"title":"Use Cache","type":"boolean"},"wait_for_model":{"description":"If the model is not ready, wait for it instead of receiving 503. It limits the number of requests required to get your inference done. 
It is advised to only set this flag to true after receiving a 503 error as it will limit hanging in your application to known places.","instillAcceptFormats":["boolean"],"instillShortDescription":"Wait for model ready","instillUIOrder":0,"instillUpstreamTypes":["value","reference"],"title":"Wait For Model","type":"boolean"}},"required":[],"title":"Options","type":"object"},"parameters":{"instillUIOrder":2,"properties":{"max_length":{"description":"Integer to define the maximum length in tokens of the output summary.","instillAcceptFormats":["integer"],"instillUIOrder":0,"instillUpstreamTypes":["value","reference"],"title":"Max Length","type":"integer"},"max_time":{"description":"The amount of time in seconds that the query should take maximum. Network can cause some overhead so it will be a soft limit.","instillAcceptFormats":["number","integer"],"instillShortDescription":"The amount of time in seconds that the query should take maximum.","instillUIOrder":1,"instillUpstreamTypes":["value","reference"],"maximum":120.0,"minimum":0.0,"title":"Max Time","type":"number"},"min_length":{"description":"Integer to define the minimum length in tokens of the output summary.","instillAcceptFormats":["integer"],"instillUIOrder":2,"instillUpstreamTypes":["value","reference"],"title":"Min Length","type":"integer"},"repetition_penalty":{"description":"The more a token is used within generation the more it is penalized to not be picked in successive generation passes.","instillAcceptFormats":["number","integer"],"instillUIOrder":3,"instillUpstreamTypes":["value","reference"],"maximum":100.0,"minimum":0.0,"title":"Repetition Penalty","type":"number"},"temperature":{"default":1.0,"description":"The temperature of the sampling operation. 1 means regular sampling, 0 means always take the highest score, 100.0 is getting closer to uniform probability.","instillAcceptFormats":["number","integer"],"instillShortDescription":"The temperature of the sampling operation.","instillUIOrder":4,"instillUpstreamTypes":["value","reference"],"maximum":100.0,"minimum":0.0,"title":"Temperature","type":"number"},"top_k":{"description":"Integer to define the top tokens considered within the sample operation to create new text.","instillAcceptFormats":["integer"],"instillUIOrder":5,"instillUpstreamTypes":["value","reference"],"title":"Top K","type":"integer"},"top_p":{"description":"Float to define the tokens that are within the sample operation of text generation. Add tokens in the sample for more probable to least probable until the sum of the probabilities is greater than top_p.","instillAcceptFormats":["number","integer"],"instillShortDescription":"Float to define the tokens that are within the sample operation of text generation.","instillUIOrder":6,"instillUpstreamTypes":["value","reference"],"title":"Top P","type":"number"}},"required":[],"title":"Parameters","type":"object"}},"required":["inputs"],"title":"Input","type":"object"} +{"instillUIOrder":0,"properties":{"inputs":{"instillUIOrder":1,"properties":{"generated_responses":{"description":"A list of strings corresponding to the earlier replies from the model.","instillAcceptFormats":["array:string"],"instillUIOrder":0,"instillUpstreamTypes":["value","reference"],"items":{"instillUIMultiline":true,"type":"string"},"title":"Generated Responses","type":"array"},"past_user_inputs":{"description":"A list of strings corresponding to the earlier replies from the user. 
Should be of the same length of generated_responses.","instillAcceptFormats":["array:string"],"instillShortDescription":"A list of strings corresponding to the earlier replies from the user.","instillUIOrder":1,"instillUpstreamTypes":["value","reference"],"items":{"instillUIMultiline":true,"type":"string"},"title":"Past User Inputs","type":"array"},"text":{"description":"The last input from the user in the conversation.","instillAcceptFormats":["string"],"instillUIMultiline":true,"instillUIOrder":2,"instillUpstreamTypes":["value","reference","template"],"title":"Text","type":"string"}},"required":["text"],"title":"Inputs","type":"object"},"model":{"description":"The Hugging Face model to be used","instillAcceptFormats":["string"],"instillUpstreamTypes":["value","reference","template"],"title":"Model","type":"string"},"options":{"properties":{"use_cache":{"description":"There is a cache layer on the inference API to speedup requests we have already seen. Most models can use those results as is as models are deterministic (meaning the results will be the same anyway). However if you use a non deterministic model, you can set this parameter to prevent the caching mechanism from being used resulting in a real new query.","instillAcceptFormats":["boolean"],"instillShortDescription":"Enable the cache of inference API","instillUIOrder":1,"instillUpstreamTypes":["value","reference"],"title":"Use Cache","type":"boolean"},"wait_for_model":{"description":"If the model is not ready, wait for it instead of receiving 503. It limits the number of requests required to get your inference done. It is advised to only set this flag to true after receiving a 503 error as it will limit hanging in your application to known places.","instillAcceptFormats":["boolean"],"instillShortDescription":"Wait for model ready","instillUIOrder":0,"instillUpstreamTypes":["value","reference"],"title":"Wait For Model","type":"boolean"}},"required":[],"title":"Options","type":"object"},"parameters":{"instillUIOrder":2,"properties":{"max_length":{"description":"Integer to define the maximum length in tokens of the output summary.","instillAcceptFormats":["integer"],"instillUIOrder":0,"instillUpstreamTypes":["value","reference"],"title":"Max Length","type":"integer"},"max_time":{"description":"The amount of time in seconds that the query should take maximum. Network can cause some overhead so it will be a soft limit.","instillAcceptFormats":["number","integer"],"instillShortDescription":"The amount of time in seconds that the query should take maximum.","instillUIOrder":1,"instillUpstreamTypes":["value","reference"],"maximum":120,"minimum":0,"title":"Max Time","type":"number"},"min_length":{"description":"Integer to define the minimum length in tokens of the output summary.","instillAcceptFormats":["integer"],"instillUIOrder":2,"instillUpstreamTypes":["value","reference"],"title":"Min Length","type":"integer"},"repetition_penalty":{"description":"The more a token is used within generation the more it is penalized to not be picked in successive generation passes.","instillAcceptFormats":["number","integer"],"instillUIOrder":3,"instillUpstreamTypes":["value","reference"],"maximum":100,"minimum":0,"title":"Repetition Penalty","type":"number"},"temperature":{"default":1,"description":"The temperature of the sampling operation. 
1 means regular sampling, 0 means always take the highest score, 100.0 is getting closer to uniform probability.","instillAcceptFormats":["number","integer"],"instillShortDescription":"The temperature of the sampling operation.","instillUIOrder":4,"instillUpstreamTypes":["value","reference"],"maximum":100,"minimum":0,"title":"Temperature","type":"number"},"top_k":{"description":"Integer to define the top tokens considered within the sample operation to create new text.","instillAcceptFormats":["integer"],"instillUIOrder":5,"instillUpstreamTypes":["value","reference"],"title":"Top K","type":"integer"},"top_p":{"description":"Float to define the tokens that are within the sample operation of text generation. Add tokens in the sample for more probable to least probable until the sum of the probabilities is greater than top_p.","instillAcceptFormats":["number","integer"],"instillShortDescription":"Float to define the tokens that are within the sample operation of text generation.","instillUIOrder":6,"instillUpstreamTypes":["value","reference"],"title":"Top P","type":"number"}},"required":[],"title":"Parameters","type":"object"}},"required":["inputs"],"title":"Input","type":"object"} diff --git a/instill/resources/schema/jsons/huggingface_task_summarization_input.json b/instill/resources/schema/jsons/huggingface_task_summarization_input.json index 8c3566b..284c6a3 100644 --- a/instill/resources/schema/jsons/huggingface_task_summarization_input.json +++ b/instill/resources/schema/jsons/huggingface_task_summarization_input.json @@ -1 +1 @@ -{"instillUIOrder":0,"properties":{"inputs":{"description":"String input","instillAcceptFormats":["string"],"instillUIOrder":0,"instillUpstreamTypes":["value","reference","template"],"title":"String Input","type":"string"},"model":{"description":"The Hugging Face model to be used","instillAcceptFormats":["string"],"instillUpstreamTypes":["value","reference","template"],"title":"Model","type":"string"},"options":{"properties":{"use_cache":{"description":"There is a cache layer on the inference API to speedup requests we have already seen. Most models can use those results as is as models are deterministic (meaning the results will be the same anyway). However if you use a non deterministic model, you can set this parameter to prevent the caching mechanism from being used resulting in a real new query.","instillAcceptFormats":["boolean"],"instillShortDescription":"Enable the cache of inference API","instillUIOrder":1,"instillUpstreamTypes":["value","reference"],"title":"Use Cache","type":"boolean"},"wait_for_model":{"description":"If the model is not ready, wait for it instead of receiving 503. It limits the number of requests required to get your inference done. 
It is advised to only set this flag to true after receiving a 503 error as it will limit hanging in your application to known places.","instillAcceptFormats":["boolean"],"instillShortDescription":"Wait for model ready","instillUIOrder":0,"instillUpstreamTypes":["value","reference"],"title":"Wait For Model","type":"boolean"}},"required":[],"title":"Options","type":"object"},"parameters":{"instillUIOrder":2,"properties":{"max_length":{"description":"Integer to define the maximum length in tokens of the output summary.","instillAcceptFormats":["integer"],"instillUIOrder":0,"instillUpstreamTypes":["value","reference"],"title":"Max Length","type":"integer"},"max_time":{"instillAcceptFormats":["number","integer"],"instillUIOrder":1,"instillUpstreamTypes":["value","reference"],"maximum":120.0,"minimum":0.0,"title":"Max Time","type":"number"},"min_length":{"description":"Integer to define the minimum length in tokens of the output summary.","instillAcceptFormats":["integer"],"instillUIOrder":2,"instillUpstreamTypes":["value","reference"],"title":"Min Length","type":"integer"},"repetition_penalty":{"description":"The more a token is used within generation the more it is penalized to not be picked in successive generation passes.","instillAcceptFormats":["number","integer"],"instillUIOrder":3,"instillUpstreamTypes":["value","reference"],"maximum":100.0,"minimum":0.0,"title":"Repetition Penalty","type":"number"},"temperature":{"default":1.0,"description":"The temperature of the sampling operation. 1 means regular sampling, 0 means always take the highest score, 100.0 is getting closer to uniform probability.","instillAcceptFormats":["number","integer"],"instillShortDescription":"The temperature of the sampling operation.","instillUIOrder":4,"instillUpstreamTypes":["value","reference"],"maximum":100.0,"minimum":0.0,"title":"Temperature","type":"number"},"top_k":{"description":"Integer to define the top tokens considered within the sample operation to create new text.","instillAcceptFormats":["integer"],"instillUIOrder":5,"instillUpstreamTypes":["value","reference"],"title":"Top K","type":"integer"},"top_p":{"description":"Float to define the tokens that are within the sample operation of text generation. Add tokens in the sample for more probable to least probable until the sum of the probabilities is greater than top_p.","instillAcceptFormats":["number","integer"],"instillShortDescription":"Float to define the tokens that are within the sample operation of text generation.","instillUIOrder":6,"instillUpstreamTypes":["value","reference"],"title":"Top P","type":"number"}},"required":[],"title":"Parameters","type":"object"}},"required":["inputs"],"title":"Input","type":"object"} +{"instillUIOrder":0,"properties":{"inputs":{"description":"String input","instillAcceptFormats":["string"],"instillUIOrder":0,"instillUpstreamTypes":["value","reference","template"],"title":"String Input","type":"string"},"model":{"description":"The Hugging Face model to be used","instillAcceptFormats":["string"],"instillUpstreamTypes":["value","reference","template"],"title":"Model","type":"string"},"options":{"properties":{"use_cache":{"description":"There is a cache layer on the inference API to speedup requests we have already seen. Most models can use those results as is as models are deterministic (meaning the results will be the same anyway). 
However if you use a non deterministic model, you can set this parameter to prevent the caching mechanism from being used resulting in a real new query.","instillAcceptFormats":["boolean"],"instillShortDescription":"Enable the cache of inference API","instillUIOrder":1,"instillUpstreamTypes":["value","reference"],"title":"Use Cache","type":"boolean"},"wait_for_model":{"description":"If the model is not ready, wait for it instead of receiving 503. It limits the number of requests required to get your inference done. It is advised to only set this flag to true after receiving a 503 error as it will limit hanging in your application to known places.","instillAcceptFormats":["boolean"],"instillShortDescription":"Wait for model ready","instillUIOrder":0,"instillUpstreamTypes":["value","reference"],"title":"Wait For Model","type":"boolean"}},"required":[],"title":"Options","type":"object"},"parameters":{"instillUIOrder":2,"properties":{"max_length":{"description":"Integer to define the maximum length in tokens of the output summary.","instillAcceptFormats":["integer"],"instillUIOrder":0,"instillUpstreamTypes":["value","reference"],"title":"Max Length","type":"integer"},"max_time":{"instillAcceptFormats":["number","integer"],"instillUIOrder":1,"instillUpstreamTypes":["value","reference"],"maximum":120,"minimum":0,"title":"Max Time","type":"number"},"min_length":{"description":"Integer to define the minimum length in tokens of the output summary.","instillAcceptFormats":["integer"],"instillUIOrder":2,"instillUpstreamTypes":["value","reference"],"title":"Min Length","type":"integer"},"repetition_penalty":{"description":"The more a token is used within generation the more it is penalized to not be picked in successive generation passes.","instillAcceptFormats":["number","integer"],"instillUIOrder":3,"instillUpstreamTypes":["value","reference"],"maximum":100,"minimum":0,"title":"Repetition Penalty","type":"number"},"temperature":{"default":1,"description":"The temperature of the sampling operation. 1 means regular sampling, 0 means always take the highest score, 100.0 is getting closer to uniform probability.","instillAcceptFormats":["number","integer"],"instillShortDescription":"The temperature of the sampling operation.","instillUIOrder":4,"instillUpstreamTypes":["value","reference"],"maximum":100,"minimum":0,"title":"Temperature","type":"number"},"top_k":{"description":"Integer to define the top tokens considered within the sample operation to create new text.","instillAcceptFormats":["integer"],"instillUIOrder":5,"instillUpstreamTypes":["value","reference"],"title":"Top K","type":"integer"},"top_p":{"description":"Float to define the tokens that are within the sample operation of text generation. 
Add tokens in the sample for more probable to least probable until the sum of the probabilities is greater than top_p.","instillAcceptFormats":["number","integer"],"instillShortDescription":"Float to define the tokens that are within the sample operation of text generation.","instillUIOrder":6,"instillUpstreamTypes":["value","reference"],"title":"Top P","type":"number"}},"required":[],"title":"Parameters","type":"object"}},"required":["inputs"],"title":"Input","type":"object"} diff --git a/instill/resources/schema/numbers_task_register_input.py b/instill/resources/schema/numbers_task_register_input.py index 76a042b..d2d22dc 100644 --- a/instill/resources/schema/numbers_task_register_input.py +++ b/instill/resources/schema/numbers_task_register_input.py @@ -38,9 +38,9 @@ class Input: headline: Optional[str] = None caption: Optional[str] = None asset_creator: Optional[str] = None - digital_source_type: Optional[ - DigitalSourceType - ] = DigitalSourceType.trainedAlgorithmicMedia + digital_source_type: Optional[DigitalSourceType] = ( + DigitalSourceType.trainedAlgorithmicMedia + ) generated_by: Optional[str] = None license: Optional[License] = None mining_preference: Optional[MiningPreference] = MiningPreference.notAllowed diff --git a/instill/resources/schema/redis.py b/instill/resources/schema/redis.py index 02c2481..865367e 100644 --- a/instill/resources/schema/redis.py +++ b/instill/resources/schema/redis.py @@ -14,7 +14,7 @@ class DisableSSL(Enum): @dataclass class DisableSSLMode: - mode: DisableSSL + mode: DisableSSL = DisableSSL.disable class Enable(Enum): @@ -26,7 +26,7 @@ class VerifyFullSSLMode: ca_cert: str client_cert: str client_key: str - mode: Enable + mode: Enable = Enable.verify_full @dataclass diff --git a/instill/resources/schema/restapi.py b/instill/resources/schema/restapi.py index 84dcccb..6a698ae 100644 --- a/instill/resources/schema/restapi.py +++ b/instill/resources/schema/restapi.py @@ -10,14 +10,14 @@ @dataclass class NoAuth: - auth_type: str + auth_type: str = 'NO_AUTH' @dataclass class BasicAuth: - auth_type: str password: str username: str + auth_type: str = 'BASIC_AUTH' class WhereToAddAPIKeyTo(Enum): @@ -28,15 +28,15 @@ class WhereToAddAPIKeyTo(Enum): @dataclass class APIKey: auth_location: WhereToAddAPIKeyTo - auth_type: str key: str value: str + auth_type: str = 'API_KEY' @dataclass class BearerToken: - auth_type: str token: str + auth_type: str = 'BEARER_TOKEN' @dataclass diff --git a/instill/resources/trigger.py b/instill/resources/trigger.py new file mode 100644 index 0000000..372998d --- /dev/null +++ b/instill/resources/trigger.py @@ -0,0 +1,56 @@ +# pylint: disable=no-member,wrong-import-position +from dataclasses import dataclass +from typing import List + +import instill.protogen.vdp.pipeline.v1beta.pipeline_pb2 as pipeline_pb + + +@dataclass +class TriggerByRequestRequestFields: + key: str + title: str + description: str + format: str + + +@dataclass +class TriggerByRequestResponseFields: + key: str + title: str + description: str + value: str + + +class Trigger: + def __init__( + self, + request_fields: List[TriggerByRequestRequestFields], + response_fields: List[TriggerByRequestResponseFields], + ) -> None: + + req = {} + for req_f in request_fields: + req[req_f.key] = pipeline_pb.TriggerByRequest.RequestField( + title=req_f.title, + description=req_f.description, + instill_format=req_f.format, + ) + resp = {} + for resp_f in response_fields: + resp[resp_f.key] = pipeline_pb.TriggerByRequest.ResponseField( + title=resp_f.title, + 
description=resp_f.description, + value=resp_f.value, + ) + + t = pipeline_pb.Trigger( + trigger_by_request=pipeline_pb.TriggerByRequest( + request_fields=req, + response_fields=resp, + ) + ) + + self.t = t + + def get_trigger(self) -> pipeline_pb.Trigger: + return self.t
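
For context, here is a minimal usage sketch of the new Trigger helper introduced in instill/resources/trigger.py above. The field keys, titles, descriptions, the "string" format, and the "${...}" reference value are illustrative assumptions, not values taken from the SDK; only the Trigger, TriggerByRequestRequestFields, and TriggerByRequestResponseFields names come from the diff.

    # Hypothetical usage of the new Trigger helper (field names/values are illustrative).
    from instill.resources.trigger import (
        Trigger,
        TriggerByRequestRequestFields,
        TriggerByRequestResponseFields,
    )

    # Fields a caller must supply when triggering the pipeline by request.
    request_fields = [
        TriggerByRequestRequestFields(
            key="prompt",
            title="Prompt",
            description="Text prompt forwarded to the model",
            format="string",
        )
    ]

    # Fields the pipeline returns, each mapped to a reference-style value
    # (the exact reference syntax here is assumed for illustration).
    response_fields = [
        TriggerByRequestResponseFields(
            key="completion",
            title="Completion",
            description="Generated text",
            value="${model_0.output.texts}",
        )
    ]

    # Trigger assembles the pipeline_pb.Trigger message from the two lists;
    # get_trigger() hands it back for use when building a pipeline recipe.
    trigger = Trigger(request_fields, response_fields).get_trigger()

This mirrors what Trigger.__init__ does internally: it converts each dataclass entry into a TriggerByRequest.RequestField/ResponseField keyed by its key, then wraps them in a single pipeline_pb.Trigger message.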