diff --git a/airbyte-integrations/connectors/destination-vectara/.dockerignore b/airbyte-integrations/connectors/destination-vectara/.dockerignore new file mode 100644 index 0000000000000..f784000e19e24 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/.dockerignore @@ -0,0 +1,5 @@ +* +!Dockerfile +!main.py +!destination_vectara +!setup.py diff --git a/airbyte-integrations/connectors/destination-vectara/Dockerfile b/airbyte-integrations/connectors/destination-vectara/Dockerfile new file mode 100644 index 0000000000000..cf50f0758e223 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. 
+RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY destination_vectara ./destination_vectara + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/destination-vectara diff --git a/airbyte-integrations/connectors/destination-vectara/README.md b/airbyte-integrations/connectors/destination-vectara/README.md new file mode 100644 index 0000000000000..2c68229551bcf --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/README.md @@ -0,0 +1,123 @@ +# Vectara Destination + +This is the repository for the Vectara destination connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/destinations/vectara). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. 
+If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +From the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:destination-vectara:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/destinations/vectara) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_vectara/spec.json` file. +Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination vectara test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/destination-vectara:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:destination-vectara:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. 
+ +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/destination-vectara:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-vectara:dev check --config /secrets/config.json +# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages +cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-vectara:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all destination connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Coming soon: + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:destination-vectara:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:destination-vectara:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. 
+We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/destination-vectara/destination_vectara/__init__.py b/airbyte-integrations/connectors/destination-vectara/destination_vectara/__init__.py new file mode 100644 index 0000000000000..1bc53911e4ef4 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/destination_vectara/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from .destination import DestinationVectara + +__all__ = ["DestinationVectara"] diff --git a/airbyte-integrations/connectors/destination-vectara/destination_vectara/client.py b/airbyte-integrations/connectors/destination-vectara/destination_vectara/client.py new file mode 100644 index 0000000000000..16171c1fb666a --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/destination_vectara/client.py @@ -0,0 +1,188 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +import datetime +import json +import requests +import traceback + +from typing import Any, Mapping + +from destination_vectara.config import VectaraConfig + + + +METADATA_STREAM_FIELD = "_ab_stream" +# METADATA_RECORD_ID_FIELD = "_ab_record_id" + +class VectaraClient: + + BASE_URL = "https://api.vectara.io/v1" + + def __init__(self, config: VectaraConfig): + self.customer_id = config.customer_id + self.corpus_name = config.corpus_name + self.client_id = config.oauth2.client_id + self.client_secret = config.oauth2.client_secret + # self.corpus_id = config.corpus_id + + def check(self): + try: + jwt_token = self._get_jwt_token() + if not jwt_token: + return "Unable to get JWT Token. Confirm your Client ID and Client Secret." + + list_corpora_response = self._request( + endpoint="list-corpora", + data={ + "numResults": 100, + "filter": self.corpus_name + } + ) + possible_corpora_ids_names_map = {corpus.get("id"): corpus.get("name") for corpus in list_corpora_response.get("corpus") if corpus.get("name") == self.corpus_name} + if len(possible_corpora_ids_names_map) > 1: + return f"Multiple Corpora exist with name {self.corpus_name}" + if len(possible_corpora_ids_names_map) == 1: + self.corpus_id = list(possible_corpora_ids_names_map.keys())[0] + else: + create_corpus_response = self._request( + endpoint="create-corpus", + data={ + "corpus": { + "name": self.corpus_name, + "filterAttributes": [ + { + "name": METADATA_STREAM_FIELD, + "indexed": True, + "type": "FILTER_ATTRIBUTE_TYPE__TEXT", + "level": "FILTER_ATTRIBUTE_LEVEL__DOCUMENT" + }, + # { + # "name": METADATA_RECORD_ID_FIELD, + # "indexed": True, + # "type": "FILTER_ATTRIBUTE_TYPE__TEXT", + # "level": "FILTER_ATTRIBUTE_LEVEL__DOCUMENT" + # } + ] + } + } + ) + self.corpus_id = create_corpus_response.get("corpusId") + + except Exception as e: + return str(e) + "\n" + "".join(traceback.TracebackException.from_exception(e).format()) + + def _get_jwt_token(self): + """Connect to the server and get a JWT token.""" 
+ token_endpoint = f"https://vectara-prod-{self.customer_id}.auth.us-west-2.amazoncognito.com/oauth2/token" + headers = { + "Content-Type": "application/x-www-form-urlencoded", + } + data = { + "grant_type": "client_credentials", + "client_id": self.client_id, + "client_secret": self.client_secret + } + + request_time = datetime.datetime.now().timestamp() + response = requests.request(method="POST", url=token_endpoint, headers=headers, data=data) + response_json = response.json() + + self.jwt_token = response_json.get("access_token") + self.jwt_token_expires_ts = request_time + response_json.get("expires_in") + return self.jwt_token + + def _request( + self, endpoint: str, http_method: str = "POST", params: Mapping[str, Any] = None, data: Mapping[str, Any] = None + ): + + url = f"{self.BASE_URL}/{endpoint}" + + current_ts = datetime.datetime.now().timestamp() + if self.jwt_token_expires_ts - current_ts <= 60: + self._get_jwt_token() + + headers = { + "Content-Type": "application/json", + "Accept": "application/json", + "Authorization": f"Bearer {self.jwt_token}", + "customer-id": self.customer_id + } + + response = requests.request(method=http_method, url=url, headers=headers, params=params, data=json.dumps(data)) + response.raise_for_status() + return response.json() + + def delete_doc_by_metadata(self, metadata_field_name, metadata_field_values): + document_ids = [] + for value in metadata_field_values: + query_documents_response = self._request( + endpoint="query", + data= { + "query": [ + { + "query": "", + "numResults": 100, + "corpusKey": [ + { + "customerId": self.customer_id, + "corpusId": self.corpus_id, + "metadataFilter": f"doc.{metadata_field_name} = '{value}'" + } + ] + } + ] + } + ) + document_ids.extend([document.get("id") for document in query_documents_response.get("responseSet").get("document")]) + return self.delete_docs_by_id(document_ids=document_ids) + + def delete_docs_by_id(self, document_ids): + documents_not_deleted = [] + for document_id 
in document_ids: + delete_document_response = self._request( + endpoint="delete-doc", + data={ + "customerId": self.customer_id, + "corpusId": self.corpus_id, + "documentId": document_id + } + ) + # TODO whether this is needed? + if delete_document_response: + documents_not_deleted.append(document_id) + return documents_not_deleted + + def index_documents(self, documents): + for document_section, document_metadata, document_id in documents: + document_metadata = self._normalize(document_metadata) + index_document_response = self._request( + endpoint="index", + data={ + "customerId": self.customer_id, + "corpusId": self.corpus_id, + "document": { + "documentId": document_id, + "metadataJson": json.dumps(document_metadata), + "section": [ + { + "title": section_title, + "text": section_text + } + for section_title, section_text in document_section.items() + ] + } + } + ) + assert index_document_response.get("status").get("code") == "OK", index_document_response.get("status").get("statusDetail") + + def _normalize(self, metadata: dict) -> dict: + result = {} + for key, value in metadata.items(): + if isinstance(value, (str, int, float, bool)): + result[key] = value + else: + # JSON encode all other types + result[key] = json.dumps(value) + return result diff --git a/airbyte-integrations/connectors/destination-vectara/destination_vectara/config.py b/airbyte-integrations/connectors/destination-vectara/destination_vectara/config.py new file mode 100644 index 0000000000000..10c17570608f0 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/destination_vectara/config.py @@ -0,0 +1,59 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +from typing import List, Optional + +from pydantic import BaseModel, Field +from airbyte_cdk.utils.spec_schema_transformations import resolve_refs + + +class OAuth2(BaseModel): +    client_id: str = Field(..., title="OAuth Client ID", description="OAuth2.0 client id", order=0) +    client_secret: str = Field( +        ..., title="OAuth Client Secret", description="OAuth2.0 client secret", airbyte_secret=True, order=1 +    ) + +    class Config: +        title = "OAuth2.0 Credentials" +        schema_extra = { +            "description": "OAuth2.0 credentials used to authenticate admin actions (creating/deleting corpora)", +            "group": "auth", +        } + +class VectaraConfig(BaseModel): +    oauth2: OAuth2 +    customer_id: str = Field(..., title="Customer ID", description="Your customer id as it is in the authentication url", order=2, group="account") +    corpus_name: str = Field(..., title="Corpus Name", description="The Name of Corpus to load data into", order=3, group="account") + +    text_fields: Optional[List[str]] = Field( +        default=[], +        title="Text fields to embed", +        description="List of fields in the record that should be in the section of the document. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.", +        always_show=True, +        examples=["text", "user.name", "users.*.name"], +    ) +    metadata_fields: Optional[List[str]] = Field( +        default=[], +        title="Fields to store as metadata", +        description="List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. 
If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.", + always_show=True, + examples=["age", "user", "user.name"], + ) + + class Config: + title = "Vectara Config" + schema_extra = { + "description": "Configuration to connect to the Vectara instance", + "groups": [ + {"id": "account", "title": "Account"}, + {"id": "auth", "title": "Authentication"}, + ] + } + + @classmethod + def schema(cls): + """we're overriding the schema classmethod to enable some post-processing""" + schema = super().schema() + schema = resolve_refs(schema) + return schema \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-vectara/destination_vectara/destination.py b/airbyte-integrations/connectors/destination-vectara/destination_vectara/destination.py new file mode 100644 index 0000000000000..bda7d10326ae0 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/destination_vectara/destination.py @@ -0,0 +1,92 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +from typing import Any, Iterable, Mapping + +from airbyte_cdk import AirbyteLogger +from airbyte_cdk.destinations import Destination +from airbyte_cdk.models import ( + AirbyteConnectionStatus, + AirbyteMessage, + ConfiguredAirbyteCatalog, + ConnectorSpecification, + DestinationSyncMode, + Status, + Type, +) +from destination_vectara.config import VectaraConfig +from destination_vectara.client import VectaraClient +from destination_vectara.writer import VectaraWriter + + + +class DestinationVectara(Destination): + + def write( + self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] + ) -> Iterable[AirbyteMessage]: + + """ + Reads the input stream of messages, config, and catalog to write data to the destination. + + This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received + in the input message stream. Outputting a state message means that every AirbyteRecordMessage which came before it has been + successfully persisted to the destination. This is used to ensure fault tolerance in the case that a sync fails before fully completing, + then the source is given the last state message output from this method as the starting point of the next sync. 
+ +        :param config: dict of JSON configuration matching the configuration declared in spec.json +        :param configured_catalog: The Configured Catalog describing the schema of the data being received and how it should be persisted in the +        destination +        :param input_messages: The stream of input messages received from the source +        :return: Iterable of AirbyteStateMessages wrapped in AirbyteMessage structs +        """ + +        config_model = VectaraConfig.parse_obj(config) +        writer = VectaraWriter(client=VectaraClient(config_model), text_fields=config_model.text_fields, metadata_fields=config_model.metadata_fields, catalog=configured_catalog) + +        writer.delete_streams_to_overwrite(catalog=configured_catalog) + +        for message in input_messages: +            if message.type == Type.STATE: +                # Emitting a state message indicates that all records which came before it have been written to the destination. So we flush +                # the queue to ensure writes happen, then output the state message to indicate it's safe to checkpoint state +                writer.flush() +                yield message +            elif message.type == Type.RECORD: +                record = message.record +                writer.queue_write_operation(record) +            else: +                # ignore other message types for now +                continue + +        # Make sure to flush any records still in the queue +        writer.flush() + +    def check(self, logger: AirbyteLogger, config: VectaraConfig) -> AirbyteConnectionStatus: +        """ +        Tests if the input configuration can be used to successfully connect to the destination with the needed permissions +        e.g: if a provided API token or password can be used to connect and write to the destination. 
+ + :param logger: Logging object to display debug/info/error to the logs + (logs will not be accessible via airbyte UI if they are not passed to this logger) + :param config: Json object containing the configuration of this destination, content of this json is as specified in + the properties of the spec.json file + + :return: AirbyteConnectionStatus indicating a Success or Failure + """ + + client = VectaraClient(config=config) + client_error = client.check() + if client_error: + return AirbyteConnectionStatus(status=Status.FAILED, message="\n".join([client_error])) + else: + return AirbyteConnectionStatus(status=Status.SUCCEEDED) + + def spec(self, *args: Any, **kwargs: Any) -> ConnectorSpecification: + return ConnectorSpecification( + documentationUrl="https://docs.airbyte.com/integrations/destinations/vectara", + supportsIncremental=True, + supported_destination_sync_modes=[DestinationSyncMode.overwrite, DestinationSyncMode.append], + connectionSpecification=VectaraConfig.schema(), + ) diff --git a/airbyte-integrations/connectors/destination-vectara/destination_vectara/writer.py b/airbyte-integrations/connectors/destination-vectara/destination_vectara/writer.py new file mode 100644 index 0000000000000..735756310ca24 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/destination_vectara/writer.py @@ -0,0 +1,115 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +import dpath.util +import uuid + +from typing import Any, Dict, List, Mapping, Optional + +from airbyte_cdk.models import ConfiguredAirbyteCatalog, AirbyteRecordMessage, ConfiguredAirbyteStream +from airbyte_cdk.models.airbyte_protocol import DestinationSyncMode +from airbyte_cdk.utils.traced_exception import AirbyteTracedException, FailureType + +from destination_vectara.client import VectaraClient + + + +METADATA_STREAM_FIELD = "_ab_stream" +# METADATA_RECORD_ID_FIELD = "_ab_record_id" + +class VectaraWriter: + +    write_buffer: List[Mapping[str, Any]] = [] #TODO fix +    flush_interval = 1000 + +    def __init__(self, client: VectaraClient, text_fields: Optional[List[str]], metadata_fields: Optional[List[str]], catalog: ConfiguredAirbyteCatalog): +        self.client = client +        self.text_fields = text_fields +        self.metadata_fields = metadata_fields +        self.streams = {f"{stream.stream.namespace}_{stream.stream.name}": stream for stream in catalog.streams} +        self.ids_to_delete: List[str] = [] + +    def delete_streams_to_overwrite(self, catalog: ConfiguredAirbyteCatalog) -> None: +        streams_to_overwrite = [ +            stream.stream.name for stream in catalog.streams if stream.destination_sync_mode == DestinationSyncMode.overwrite +        ] +        if len(streams_to_overwrite): +            self.client.delete_doc_by_metadata(metadata_field_name=METADATA_STREAM_FIELD, metadata_field_values=streams_to_overwrite) + +    def _delete_documents_to_dedupe(self): +        if len(self.ids_to_delete) > 0: +            self.client.delete_docs_by_id(document_ids=self.ids_to_delete) + +    def queue_write_operation(self, record: AirbyteRecordMessage) -> None: +        """Adds messages to the write queue and flushes if the buffer is full""" + +        document_section = self._get_document_section(record=record) +        document_metadata = self._get_document_metadata(record=record) +        primary_key = self._get_record_primary_key(record=record) + +        document_id = uuid.uuid4().int +        if primary_key: +            document_id = primary_key +            self.ids_to_delete.append(primary_key) + + 
self.write_buffer.append((document_section, document_metadata, document_id)) + if len(self.write_buffer) == self.flush_interval: + self.flush() + + def flush(self) -> None: + """Writes to Convex""" + self._delete_documents_to_dedupe() + self.client.index_documents(self.write_buffer) + self.write_buffer.clear() + self.ids_to_delete.clear() + + def _get_document_section(self, record: AirbyteRecordMessage): + relevant_fields = self._extract_relevant_fields(record, self.text_fields) + if len(relevant_fields) == 0: + text_fields = ", ".join(self.text_fields) if self.text_fields else "all fields" + raise AirbyteTracedException( + internal_message="No text fields found in record", + message=f"Record {str(record.data)[:250]}... does not contain any of the configured text fields: {text_fields}. Please check your processing configuration, there has to be at least one text field set in each record.", + failure_type=FailureType.config_error, + ) + document_section = relevant_fields + return document_section + + def _extract_relevant_fields(self, record: AirbyteRecordMessage, fields: Optional[List[str]]) -> Dict[str, Any]: + relevant_fields = {} + if fields and len(fields) > 0: + for field in fields: + values = dpath.util.values(record.data, field, separator=".") + if values and len(values) > 0: + relevant_fields[field] = values if len(values) > 1 else values[0] + else: + relevant_fields = record.data + return relevant_fields + + + def _get_document_metadata(self, record: AirbyteRecordMessage) -> Dict[str, Any]: + document_metadata = self._extract_relevant_fields(record, self.metadata_fields) + document_metadata[METADATA_STREAM_FIELD] = record.stream + return document_metadata + + def _get_record_primary_key(self, record: AirbyteRecordMessage) -> Optional[str]: + stream_identifier = f"{record.namespace}_{record.stream}" + current_stream: ConfiguredAirbyteStream = self.streams[stream_identifier] + if not current_stream.primary_key: + return + raise AirbyteTracedException( + 
internal_message="No primary key found in current stream", + message=f"Stream {stream_identifier}... does not contain any configured primary key path. Please check your source stream, there has to be a primary key path configured.", + failure_type=FailureType.config_error, + ) + + primary_key = [] + for key in current_stream.primary_key: + try: + primary_key.append(str(dpath.util.get(record.data, key))) + except KeyError: + primary_key.append("__not_found__") + stringified_primary_key = "_".join(primary_key) + return f"{stream_identifier}_{stringified_primary_key}" + diff --git a/airbyte-integrations/connectors/destination-vectara/icon.jpg b/airbyte-integrations/connectors/destination-vectara/icon.jpg new file mode 100644 index 0000000000000..b9e32d26b05e9 Binary files /dev/null and b/airbyte-integrations/connectors/destination-vectara/icon.jpg differ diff --git a/airbyte-integrations/connectors/destination-vectara/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-vectara/integration_tests/integration_test.py new file mode 100644 index 0000000000000..d945ab6b09af1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/integration_tests/integration_test.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +def integration_test(): + # TODO write integration tests + pass diff --git a/airbyte-integrations/connectors/destination-vectara/main.py b/airbyte-integrations/connectors/destination-vectara/main.py new file mode 100644 index 0000000000000..289b411fb3181 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/main.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from destination_vectara import DestinationVectara + +if __name__ == "__main__": + DestinationVectara().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-vectara/metadata.yaml b/airbyte-integrations/connectors/destination-vectara/metadata.yaml new file mode 100644 index 0000000000000..c9bcfca25185b --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/metadata.yaml @@ -0,0 +1,26 @@ +data: + allowedHosts: + hosts: + - api.vectara.io + - "vectara-prod-${self.customer_id}.auth.us-west-2.amazoncognito.com" + registries: + oss: + enabled: false + cloud: + enabled: false + connectorSubtype: database + connectorType: destination + definitionId: 102900e7-a236-4c94-83e4-a4189b99adc2 + dockerImageTag: 0.1.0 + dockerRepository: airbyte/destination-vectara + githubIssueLabel: destination-vectara + icon: vectara.svg + license: MIT + name: Vectara + releaseDate: 2023-11-10 + releaseStage: alpha + supportLevel: community + documentationUrl: https://docs.airbyte.com/integrations/destinations/vectara + tags: + - language:python +metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-vectara/requirements.txt b/airbyte-integrations/connectors/destination-vectara/requirements.txt new file mode 100644 index 0000000000000..d6e1198b1ab1f --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/requirements.txt @@ -0,0 +1 @@ +-e . diff --git a/airbyte-integrations/connectors/destination-vectara/setup.py b/airbyte-integrations/connectors/destination-vectara/setup.py new file mode 100644 index 0000000000000..1ab6542c702b9 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/setup.py @@ -0,0 +1,25 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk", +] + +TEST_REQUIREMENTS = ["pytest~=6.2"] + +setup( + name="destination_vectara", + description="Destination implementation for Vectara.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/destination-vectara/unit_tests/__init__.py b/airbyte-integrations/connectors/destination-vectara/unit_tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-integrations/connectors/destination-vectara/unit_tests/test_destination.py b/airbyte-integrations/connectors/destination-vectara/unit_tests/test_destination.py new file mode 100644 index 0000000000000..d0f047a7c7acc --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/unit_tests/test_destination.py @@ -0,0 +1,96 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +import unittest +from unittest.mock import MagicMock, Mock, patch + +from airbyte_cdk import AirbyteLogger +from airbyte_cdk.models import ConnectorSpecification, Status +from destination_vectara.config import ConfigModel +from destination_vectara.destination import DestinationVectara + + +class TestDestinationVectara(unittest.TestCase): + def setUp(self): + self.config = { + "processing": {"text_fields": ["str_col"], "metadata_fields": [], "chunk_size": 1000}, + "embedding": {"mode": "openai", "openai_key": "mykey"}, + "indexing": { + "oauth2": {"client_id": "client_id", "client_secret": "client_secret"}, + "customer_id": "customer_id", + "corpus_name": "corpus_name" + }, + } + self.config_model = ConfigModel.parse_obj(self.config) + self.logger = AirbyteLogger() + + @patch("destination_vectara.destination.VectaraIndexer") + @patch("destination_vectara.destination.create_from_config") + def test_check(self, MockedEmbedder, MockedVectaraIndexer): + mock_embedder = Mock() + mock_indexer = Mock() + MockedVectaraIndexer.return_value = mock_indexer + MockedEmbedder.return_value = mock_embedder + + mock_embedder.check.return_value = None + mock_indexer.check.return_value = None + + destination = DestinationVectara() + result = destination.check(self.logger, self.config) + + self.assertEqual(result.status, Status.SUCCEEDED) + mock_embedder.check.assert_called_once() + mock_indexer.check.assert_called_once() + + @patch("destination_vectara.destination.VectaraIndexer") + @patch("destination_vectara.destination.create_from_config") + def test_check_with_errors(self, MockedEmbedder, MockedVectaraIndexer): + mock_embedder = Mock() + mock_indexer = Mock() + MockedVectaraIndexer.return_value = mock_indexer + MockedEmbedder.return_value = mock_embedder + + embedder_error_message = "Embedder Error" + indexer_error_message = "Indexer Error" + + mock_embedder.check.return_value = embedder_error_message + mock_indexer.check.return_value = indexer_error_message + + destination 
= DestinationVectara() + result = destination.check(self.logger, self.config) + + self.assertEqual(result.status, Status.FAILED) + self.assertEqual(result.message, f"{embedder_error_message}\n{indexer_error_message}") + + mock_embedder.check.assert_called_once() + mock_indexer.check.assert_called_once() + + @patch("destination_vectara.destination.Writer") + @patch("destination_vectara.destination.VectaraIndexer") + @patch("destination_vectara.destination.create_from_config") + def test_write(self, MockedEmbedder, MockedVectaraIndexer, MockedWriter): + mock_embedder = Mock() + mock_indexer = Mock() + mock_writer = Mock() + + MockedVectaraIndexer.return_value = mock_indexer + MockedWriter.return_value = mock_writer + MockedEmbedder.return_value = mock_embedder + + mock_writer.write.return_value = [] + + configured_catalog = MagicMock() + input_messages = [] + + destination = DestinationVectara() + list(destination.write(self.config, configured_catalog, input_messages)) + + MockedWriter.assert_called_once_with(self.config_model.processing, mock_indexer, mock_embedder, batch_size=128) + mock_writer.write.assert_called_once_with(configured_catalog, input_messages) + + def test_spec(self): + destination = DestinationVectara() + result = destination.spec() + + self.assertIsInstance(result, ConnectorSpecification) diff --git a/airbyte-integrations/connectors/destination-vectara/unit_tests/test_writer.py b/airbyte-integrations/connectors/destination-vectara/unit_tests/test_writer.py new file mode 100644 index 0000000000000..ae93953dc24d7 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/unit_tests/test_writer.py @@ -0,0 +1,116 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +import datetime +import unittest +from unittest.mock import Mock + +from airbyte_cdk.models.airbyte_protocol import AirbyteStream, DestinationSyncMode, SyncMode +from destination_vectara.config import VectaraConfig +from destination_vectara.writer import VectaraWriter + + +class TestVectaraWriter(unittest.TestCase): + def setUp(self): + self.mock_config = VectaraConfig( + **{ + "oauth2": {"client_id": "client_id", "client_secret": "client_secret"}, + "customer_id": "customer_id", + "corpus_name": "corpus_name" + } + ) + + def _request_side_effect(endpoint, data): + if endpoint == "list-corpora": + return {"corpus": []} + if endpoint == "create-corpus": + return {"corpusId": 999} + if endpoint == "query": + return {"responseSet": {"document": [{"id": 0}, {"id": 1}, {"id": 2}]}} + if endpoint == "delete-doc": + return {} + if endpoint == "index": + return {"status": {"code": "OK", "statusDetail": "sample status detail"}} + return dict() + + self._request_side_effect = _request_side_effect + + self.vectara_indexer = VectaraWriter(self.mock_config) + self.vectara_indexer._get_jwt_token = Mock() + self.vectara_indexer.jwt_token = Mock() + self.vectara_indexer.jwt_token_expires_ts = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc).timestamp() + self.vectara_indexer._request = Mock() + self.vectara_indexer._request.side_effect = self._request_side_effect + + # self.mock_client = self.vectara_indexer._get_client() + # self.mock_client.get_or_create_collection = Mock() + # self.mock_collection = self.mock_client.get_or_create_collection() + # self.vectara_indexer.client = self.mock_client + # self.mock_client.get_collection = Mock() + + def test_invalid_oauth_credentials(self): + self.vectara_indexer._get_jwt_token.return_value = None + result = self.vectara_indexer.check() + self.assertEqual(result, "Unable to get JWT Token. 
Confirm your Client ID and Client Secret.") + + def test_multiple_corpora_with_corpus_name(self): + self.vectara_indexer._request.side_effect = None + self.vectara_indexer._request.return_value = {"corpus": [{"id": 0, "name": "corpus_name"}, {"id": 1, "name": "corpus_name"}]} + result = self.vectara_indexer.check() + self.assertEqual(result, f"Multiple Corpora exist with name {self.mock_config.corpus_name}") + + def test_one_corpus_with_corpus_name(self): + self.vectara_indexer._request.side_effect = None + self.vectara_indexer._request.return_value = {"corpus": [{"id": 0, "name": "corpus_name"}]} + result = self.vectara_indexer.check() + self.assertEqual(self.vectara_indexer.corpus_id, 0) + + def test_no_corpus_with_corpus_name(self): + result = self.vectara_indexer.check() + self.assertEqual(self.vectara_indexer.corpus_id, 999) + + def test_check_handles_failure_conditions(self): + self.vectara_indexer._request.side_effect = Exception("Random exception") + result = self.vectara_indexer.check() + self.assertTrue("Random exception" in result) + + def test_pre_sync_calls_delete(self): + self.vectara_indexer.check() + self.vectara_indexer.pre_sync( + Mock( + streams=[ + Mock( + destination_sync_mode=DestinationSyncMode.overwrite, + stream=AirbyteStream(name="some_stream", json_schema={}, supported_sync_modes=[SyncMode.full_refresh]), + ) + ] + ) + ) + + self.vectara_indexer._request.assert_any_call(endpoint="delete-doc", data={"customerId": self.mock_config.customer_id, "corpusId": self.vectara_indexer.corpus_id, "documentId": 0}) + self.vectara_indexer._request.assert_any_call(endpoint="delete-doc", data={"customerId": self.mock_config.customer_id, "corpusId": self.vectara_indexer.corpus_id, "documentId": 1}) + self.vectara_indexer._request.assert_any_call(endpoint="delete-doc", data={"customerId": self.mock_config.customer_id, "corpusId": self.vectara_indexer.corpus_id, "documentId": 2}) + + def test_pre_sync_does_not_call_delete(self): + 
self.vectara_indexer.pre_sync( + Mock(streams=[Mock(destination_sync_mode=DestinationSyncMode.append, stream=Mock(name="some_stream"))]) + ) + + self.vectara_indexer._request.assert_not_called() + + def test_delete_calls_delete(self): + self.vectara_indexer.check() + self.vectara_indexer.delete([0, 1, 2], None, "some_stream") + + self.vectara_indexer._request.assert_any_call(endpoint="delete-doc", data={"customerId": self.mock_config.customer_id, "corpusId": self.vectara_indexer.corpus_id, "documentId": 0}) + self.vectara_indexer._request.assert_any_call(endpoint="delete-doc", data={"customerId": self.mock_config.customer_id, "corpusId": self.vectara_indexer.corpus_id, "documentId": 1}) + self.vectara_indexer._request.assert_any_call(endpoint="delete-doc", data={"customerId": self.mock_config.customer_id, "corpusId": self.vectara_indexer.corpus_id, "documentId": 2}) + + def test_index_calls_index(self): + self.vectara_indexer.corpus_id = 0 + result = self.vectara_indexer.index([Mock(metadata={"key": "value"}, page_content="some content", embedding=[1, 2, 3])], None, "some_stream") + + print(result) + + self.vectara_indexer._request.assert_called_once() diff --git a/docs/integrations/destinations/vectara.md b/docs/integrations/destinations/vectara.md new file mode 100644 index 0000000000000..f7eea250faeaa --- /dev/null +++ b/docs/integrations/destinations/vectara.md @@ -0,0 +1,49 @@ +# Vectara + +This page contains the setup guide and reference information for the Vectara destination connector. + +Get started with Vectara at the [Vectara website](https://vectara.com/). For more details about how Vectara works, see the [Vectara documentation](https://docs.vectara.com/) + +## Overview + +The Vectara destination connector supports Full Refresh Overwrite, Full Refresh Append, and Incremental Append. + +### Output schema + +All streams will be output into a corpus in Vectara whose name must be specified in the config. 
+
+Note that there are no restrictions in naming the Vectara corpus and if a corpus with the specified name is not found, a new corpus with that name will be created. Also, if multiple corpora exist with the same name, an error will be returned as Airbyte will be unable to determine the preferred corpus.
+
+
+### Features
+
+| Feature | Supported? |
+| :---------------------------- | :--------- |
+| Full Refresh Sync | Yes |
+| Incremental - Append Sync | Yes |
+| Incremental - Dedupe Sync | No |
+
+
+## Getting started
+
+### Requirements
+
+- [Vectara Account](https://console.vectara.com/signup)
+- [Vectara Corpus](https://docs.vectara.com/docs/console-ui/creating-a-corpus)
+- [OAuth2.0 Credentials](https://docs.vectara.com/docs/learn/authentication/oauth-2)
+
+### Setup the Vectara Destination in Airbyte
+
+You should now have all the requirements needed to configure Vectara as a destination in the UI. You'll need the following information to configure the Vectara destination:
+
+- (Required) OAuth2.0 Credentials
+  - (Required) **Client ID**
+  - (Required) **Client Secret**
+- (Required) **Customer ID**
+- (Required) **Corpus Name**
+
+## Changelog
+
+| Version | Date | Pull Request | Subject |
+| :------ | :--------- | :------------------------------------------------------- | :---------------------------------------------------------------- |
+| 0.1.0 | 2023-11-10 | [31958](https://github.com/airbytehq/airbyte/pull/31958) | 🎉 New Destination: Vectara (Vector Database) |