From 15aef242a5451bdfd88a638c7a0d65f74b4a4959 Mon Sep 17 00:00:00 2001 From: Teal Larson Date: Mon, 9 May 2022 04:13:19 -0400 Subject: [PATCH 01/55] Fix upgrade guide link (#12693) * update upgrade guide link --- airbyte-webapp/src/config/uiConfig.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/airbyte-webapp/src/config/uiConfig.ts b/airbyte-webapp/src/config/uiConfig.ts index 894b77e587f0d..6e8d7f52959ff 100644 --- a/airbyte-webapp/src/config/uiConfig.ts +++ b/airbyte-webapp/src/config/uiConfig.ts @@ -1,23 +1,23 @@ -const BASE_DOCS_LINK = "https://docs.airbyte.io"; +const BASE_DOCS_LINK = "https://docs.airbyte.com"; const uiConfig = { technicalSupport: `${BASE_DOCS_LINK}/troubleshooting/on-deploying`, termsLink: "https://airbyte.com/terms", privacyLink: "https://airbyte.com/privacy-policy", helpLink: "https://airbyte.com/community", - gitLink: "https://docs.airbyte.com/quickstart/deploy-airbyte", + gitLink: `${BASE_DOCS_LINK}/quickstart/deploy-airbyte`, updateLink: `${BASE_DOCS_LINK}/operator-guides/upgrading-airbyte`, productReleaseStages: `${BASE_DOCS_LINK}/project-overview/product-release-stages`, slackLink: "https://slack.airbyte.com", supportTicketLink: "https://airbyte.com/contact-support", docsLink: BASE_DOCS_LINK, - configurationArchiveLink: `${BASE_DOCS_LINK}/tutorials/upgrading-airbyte`, + configurationArchiveLink: `${BASE_DOCS_LINK}/operator-guides/upgrading-airbyte/`, normalizationLink: `${BASE_DOCS_LINK}/understanding-airbyte/connections#airbyte-basic-normalization`, namespaceLink: `${BASE_DOCS_LINK}/understanding-airbyte/namespaces`, tutorialLink: "https://www.youtube.com/watch?v=Rcpt5SVsMpk&feature=emb_logo", statusLink: "https://status.airbyte.io/", recipesLink: "https://airbyte.com/recipes", - syncModeLink: "https://docs.airbyte.com/understanding-airbyte/connections/incremental-deduped-history", + syncModeLink: `${BASE_DOCS_LINK}/understanding-airbyte/connections/incremental-deduped-history`, demoLink: "https://demo.airbyte.io", contactSales: "https://airbyte.com/talk-to-sales", webpageLink: "https://airbyte.com", From 27e6ce2ca82ea7088b4df2e1531eeb439fa2885c Mon Sep 17 00:00:00 2001 From: Serhii Lazebnyi <53845333+lazebnyi@users.noreply.github.com> Date: Mon, 9 May 2022 14:56:52 +0300 Subject: [PATCH 02/55] Source Amazon S3: Refactored docs (#12534) * Refactored spec and docs * Updated spec.json * Rollback spec fromating * Rollback spec fromating * Rollback spec fromating --- .../source-s3/integration_tests/spec.json | 2 +- docs/integrations/sources/s3.md | 94 ++++++++----------- 2 files changed, 42 insertions(+), 54 deletions(-) diff --git a/airbyte-integrations/connectors/source-s3/integration_tests/spec.json b/airbyte-integrations/connectors/source-s3/integration_tests/spec.json index 602628f04e58c..80ec9ec7f9752 100644 --- a/airbyte-integrations/connectors/source-s3/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-s3/integration_tests/spec.json @@ -221,4 +221,4 @@ }, "supportsIncremental": true, "supported_destination_sync_modes": ["overwrite", "append", "append_dedup"] -} +} \ No newline at end of file diff --git a/docs/integrations/sources/s3.md b/docs/integrations/sources/s3.md index 1029a08de41aa..526f5a634681b 100644 --- a/docs/integrations/sources/s3.md +++ b/docs/integrations/sources/s3.md @@ -1,28 +1,45 @@ -# S3 +# Amazon S3 -## Overview +This page contains the setup guide and reference information for the Amazon S3 source connector. 
 
-The S3 source enables syncing of **file-based tables** with support for multiple files using glob-like pattern matching, and both Full Refresh and Incremental syncs, using the last\_modified property of files to determine incremental batches.
-**This connector does not support syncing unstructured data files such as raw text, audio, or videos.**
-You can choose if this connector will read only the new/updated files, or all the matching files, every time a sync is run.
+## Prerequisites
 
-Connector allows using either Amazon S3 storage or 3rd party S3 compatible service like Wasabi or custom S3 services set up with minio, leofs, ceph etc.
+
 
-### Output Schema
+
 
-At this time, this source produces only a single stream \(table\) for the target files.
+## Setup guide
 
-By default, the schema will be automatically inferred from all the relevant files present when setting up the connection, however you can also specify a schema in the source settings to enforce desired columns and datatypes. Any additional columns found \(on any sync\) are packed into an extra mapping field called `_ab_additional_properties`. Any missing columns will be added and null-filled.
+### Step 1: Set up Amazon S3
 
-We'll be considering extending these behaviours in the future and welcome your feedback!
+* If syncing from a private bucket, the credentials you use for the connection must have both `read` and `list` access on the S3 bucket. `list` is required to discover files based on the provided pattern\(s\).
+
+### Step 2: Set up the Amazon S3 connector in Airbyte
+
+### For Airbyte Cloud:
+
+1. [Log into your Airbyte Cloud](https://cloud.airbyte.io/workspaces) account.
+2. In the left navigation bar, click ****. In the top-right corner, click **+new source/destination**.
+3. On the Set up page, enter the name for the connector and select **** from the type dropdown.
+4. Set `dataset` appropriately. This will be the name of the table in the destination.
+5. If your bucket contains _only_ files containing data for this table, use `**` as path\_pattern. See the [Path Patterns section](s3.md#path-patterns) for more specific pattern matching.
+6. Leave schema as `{}` to automatically infer it from the file\(s\). For details on providing a schema, see the [User Schema section](s3.md#user-schema).
+7. Fill in the fields within the provider box appropriately. If your bucket is not public, add [credentials](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) with sufficient permissions under `aws_access_key_id` and `aws_secret_access_key`.
+8. Choose the format corresponding to the format of your files and fill in fields as required. If unsure about values, try out the defaults and come back if needed. Find details on these settings [here](s3.md#file-format-settings).
+
+### For Airbyte OSS:
 
-Note that you should provide the `dataset` which dictates how the table will be identified in the destination.
+1. Create a new S3 source with a suitable name. Since each S3 source maps to just a single table, it may be worth including that in the name.
+2. Set `dataset` appropriately. This will be the name of the table in the destination.
+3. If your bucket contains _only_ files containing data for this table, use `**` as path\_pattern. See the [Path Patterns section](s3.md#path-patterns) for more specific pattern matching.
+4. Leave schema as `{}` to automatically infer it from the file\(s\). For details on providing a schema, see the [User Schema section](s3.md#user-schema).
+5. Fill in the fields within the provider box appropriately. If your bucket is not public, add [credentials](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) with sufficient permissions under `aws_access_key_id` and `aws_secret_access_key`. +6. Choose the format corresponding to the format of your files and fill in fields as required. If unsure about values, try out the defaults and come back if needed. Find details on these settings [here](s3.md#file-format-settings). -### Data Types -Currently, complex types \(array and object\) are coerced to string, but we'll be looking to improve support for this in the future! +## Supported sync modes -### Features +The Amazon S3 source connector supports the following [sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-sync-modes): | Feature | Supported? | | :--- | :--- | @@ -33,7 +50,8 @@ Currently, complex types \(array and object\) are coerced to string, but we'll b | Replicate Multiple Streams \(distinct tables\) | No | | Namespaces | No | -### File Compressions + +## File Compressions | Compression | Supported? | | :--- | :--- | @@ -46,41 +64,8 @@ Currently, complex types \(array and object\) are coerced to string, but we'll b Please let us know any specific compressions you'd like to see support for next! -### File Formats - -File Formats are mostly enabled \(and further tested\) thanks to other open-source libraries that we are using under the hood such as: - -* [PyArrow](https://arrow.apache.org/docs/python/csv.html) -| Format | Supported? | -| :--- | :--- | -| CSV | Yes | -| Parquet | Yes | -| JSON | No | -| HTML | No | -| XML | No | -| Excel | No | -| Feather | No | -| Pickle | No | - -We're looking to enable these other formats very soon, so watch this space! - -## Getting started - -### Requirements - -* If syncing from a private bucket, the credentials you use for the connection must have have both `read` and `list` access on the S3 bucket. `list` is required to discover files based on the provided pattern\(s\). - -### Quickstart - -1. Create a new S3 source with a suitable name. Since each S3 source maps to just a single table, it may be worth including that in the name. -2. Set `dataset` appropriately. This will be the name of the table in the destination. -3. If your bucket contains _only_ files containing data for this table, use `**` as path\_pattern. See the [Path Patterns section](s3.md#path-patterns) for more specific pattern matching. -4. Leave schema as `{}` to automatically infer it from the file\(s\). For details on providing a schema, see the [User Schema section](s3.md#user-schema). -5. Fill in the fields within the provider box appropriately. If your bucket is not public, add [credentials](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) with sufficient permissions under `aws_access_key_id` and `aws_secret_access_key`. -6. Choose the format corresponding to the format of your files and fill in fields as required. If unsure about values, try out the defaults and come back if needed. Find details on these settings [here](s3.md#file-format-settings). - -### Path Pattern +## Path Pattern \(tl;dr -> path pattern syntax using [wcmatch.glob](https://facelessuser.github.io/wcmatch/glob/). 
GLOBSTAR and SPLIT flags are enabled.\)
 
@@ -130,7 +115,8 @@ We want to pick up part1.csv, part2.csv and part3.csv \(excluding another\_part1
 
 As you can probably tell, there are many ways to achieve the same goal with path patterns. We recommend using a pattern that ensures clarity and is robust against future additions to the directory structure.
 
-### User Schema
+
+## User Schema
 
 Providing a schema allows for more control over the output of this stream. Without a provided schema, columns and datatypes will be inferred from each file and a superset schema created. This will probably be fine in most cases but there may be situations you want to enforce a schema instead, e.g.:
 
@@ -154,7 +140,8 @@ For example:
 * {"id": "integer", "location": "string", "longitude": "number", "latitude": "number"}
 * {"username": "string", "friends": "array", "information": "object"}
 
-### S3 Provider Settings
+
+## S3 Provider Settings
 
 * `bucket` : name of the bucket your files are in
 * `aws_access_key_id` : one half of the [required credentials](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) for accessing a private bucket.
@@ -170,7 +157,7 @@ For example:
 
 Note that all files within one stream must adhere to the same read options for every provided format.
 
-#### CSV
+### CSV
 
 Since CSV files are effectively plain text, providing specific reader options is often required for correct parsing of the files. These settings are applied when a CSV is created or exported so please ensure that this process happens consistently over time.
 
@@ -192,7 +179,7 @@ Since CSV files are effectively plain text, providing specific reader options is
 {"column_names": ["column1", "column2", "column3"]}
 ```
 
-#### Parquet
+### Parquet
 
 Apache Parquet file is a column-oriented data storage format of the Apache Hadoop ecosystem. It provides efficient data compression and encoding schemes with enhanced performance to handle complex data in bulk. For now, this connector iterates through individual files at the abstract level; partitioned Parquet datasets are therefore unsupported. The following settings are available:
 
@@ -202,6 +189,7 @@ Apache Parquet file is a column-oriented data storage format of the Apache Hadoo
 
 You can find more details [here](https://arrow.apache.org/docs/python/generated/pyarrow.parquet.ParquetFile.html#pyarrow.parquet.ParquetFile.iter_batches).
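To sanity-check a pattern before configuring the source, you can run it against candidate object keys with wcmatch directly, using the same flags the connector enables. A minimal sketch (the keys below are illustrative):

```python
from wcmatch import glob

PATTERN = "some_table_files/*.csv|more_table_files/*.csv"
KEYS = [
    "some_table_files/part1.csv",
    "some_table_files/part2.csv",
    "more_table_files/part3.csv",
    "extras/misc/another_part1.csv",  # should not match
]

for key in KEYS:
    # globmatch tests one path against the pattern(s): SPLIT enables the
    # |-separated multi-pattern syntax, GLOBSTAR enables `**`.
    matched = glob.globmatch(key, PATTERN, flags=glob.GLOBSTAR | glob.SPLIT)
    print(f"{key}: {'match' if matched else 'no match'}")
```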
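For orientation, here is roughly how CSV options of the kind described above map onto PyArrow, which this connector uses under the hood. This is a standalone sketch reading a local file; the file name is a placeholder and the connector's internal wiring differs:

```python
from pyarrow import csv

# Reader, parser, and converter options mirror the connector's CSV settings.
read_options = csv.ReadOptions(block_size=10000, encoding="utf8")
parse_options = csv.ParseOptions(delimiter=",", quote_char='"', double_quote=True)
convert_options = csv.ConvertOptions(true_values=["1", "True"], false_values=["0", "False"])

table = csv.read_csv(
    "part1.csv",
    read_options=read_options,
    parse_options=parse_options,
    convert_options=convert_options,
)
print(table.schema)
```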
+ ## Changelog | Version | Date | Pull Request | Subject | From 4ff17abcd54549e4608b76cde070d80e01a66941 Mon Sep 17 00:00:00 2001 From: terencecho Date: Mon, 9 May 2022 10:28:49 -0400 Subject: [PATCH 03/55] Use different CustomerIO templates and edit slack notifs (#12674) * Use different CustomerIO templates and edit slack notifs * fix unit test * fix file naming and comments --- ...java => CustomerioNotificationClient.java} | 54 ++++++++++--------- .../notification/NotificationClient.java | 15 ++++-- .../notification/SlackNotificationClient.java | 32 +++++------ .../auto_disable_notification_template.json | 11 ++-- ...disable_warning_notification_template.json | 19 ------- .../customerio/default_template.json | 17 ++++++ ...to_disable_slack_notification_template.txt | 5 +- ...le_warning_slack_notification_template.txt | 5 +- .../CustomerioNotificationClientTest.java | 6 ++- .../SlackNotificationClientTest.java | 26 ++++++--- .../scheduler/persistence/JobNotifier.java | 4 +- 11 files changed, 110 insertions(+), 84 deletions(-) rename airbyte-notification/src/main/java/io/airbyte/notification/{CustomeriolNotificationClient.java => CustomerioNotificationClient.java} (70%) delete mode 100644 airbyte-notification/src/main/resources/customerio/auto_disable_warning_notification_template.json create mode 100644 airbyte-notification/src/main/resources/customerio/default_template.json diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/CustomeriolNotificationClient.java b/airbyte-notification/src/main/java/io/airbyte/notification/CustomerioNotificationClient.java similarity index 70% rename from airbyte-notification/src/main/java/io/airbyte/notification/CustomeriolNotificationClient.java rename to airbyte-notification/src/main/java/io/airbyte/notification/CustomerioNotificationClient.java index 1d7ca81bd5f4e..8e966d9766a04 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/CustomeriolNotificationClient.java +++ b/airbyte-notification/src/main/java/io/airbyte/notification/CustomerioNotificationClient.java @@ -12,35 +12,37 @@ import java.net.http.HttpClient; import java.net.http.HttpRequest; import java.net.http.HttpResponse; +import java.util.UUID; import org.apache.commons.lang3.NotImplementedException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Notification client that uses customer.io API send emails. + * + * These notifications rely on `TRANSACTION_MESSAGE_ID`, which are basically templates you create + * through customer.io. These IDs are specific to a user's account on customer.io, so they will be + * different for every user. For now they are stored as variables here, but in the future they may + * be stored in as a notification config in the database. + * + * For Airbyte Cloud, Airbyte engineers may use `DEFAULT_TRANSACTION_MESSAGE_ID = "6"` as a generic + * template for notifications. 
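+ *
+ * As a rough sketch (the recipient address below is a placeholder), the rendered request body
+ * for the auto-disable template looks like:
+ *
+ * <pre>
+ * {
+ *   "transactional_message_id": "7",
+ *   "to": "user@example.com",
+ *   "identifiers": { "email": "user@example.com" },
+ *   "message_data": {
+ *     "source": "...", "destination": "...", "job_description": "...",
+ *     "workspace_id": "...", "connection_id": "..."
+ *   }
+ * }
+ * </pre>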
 */
-public class CustomeriolNotificationClient extends NotificationClient {
+public class CustomerioNotificationClient extends NotificationClient {
 
-  private static final Logger LOGGER = LoggerFactory.getLogger(CustomeriolNotificationClient.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(CustomerioNotificationClient.class);
 
-  // Once the configs are editable through the UI, these should be stored in
-  // airbyte-config/models/src/main/resources/types/CustomerioNotificationConfiguration.yaml
-  // - SENDER_EMAIL
-  // - receiver email
-  // - customer.io identifier email
-  // - customer.io TRANSACTION_MESSAGE_ID
-  private static final String SENDER_EMAIL = "Airbyte Notification <no-reply@airbyte.io>";
-  private static final String TRANSACTION_MESSAGE_ID = "6";
+  private static final String AUTO_DISABLE_TRANSACTION_MESSAGE_ID = "7";
+  private static final String AUTO_DISABLE_WARNING_TRANSACTION_MESSAGE_ID = "8";
 
   private static final String CUSTOMERIO_EMAIL_API_ENDPOINT = "https://api.customer.io/v1/send/email";
   private static final String AUTO_DISABLE_NOTIFICATION_TEMPLATE_PATH = "customerio/auto_disable_notification_template.json";
-  private static final String AUTO_DISABLE_WARNING_NOTIFICATION_TEMPLATE_PATH = "customerio/auto_disable_warning_notification_template.json";
 
   private final HttpClient httpClient;
   private final String apiToken;
   private final String emailApiEndpoint;
 
-  public CustomeriolNotificationClient(final Notification notification) {
+  public CustomerioNotificationClient(final Notification notification) {
     super(notification);
     this.apiToken = System.getenv("CUSTOMERIO_API_KEY");
     this.emailApiEndpoint = CUSTOMERIO_EMAIL_API_ENDPOINT;
@@ -50,10 +52,10 @@ public CustomeriolNotificationClient(final Notification notification) {
   }
 
   @VisibleForTesting
-  public CustomeriolNotificationClient(final Notification notification,
-                                       final String apiToken,
-                                       final String emailApiEndpoint,
-                                       final HttpClient httpClient) {
+  public CustomerioNotificationClient(final Notification notification,
+                                      final String apiToken,
+                                      final String emailApiEndpoint,
+                                      final HttpClient httpClient) {
     super(notification);
     this.apiToken = apiToken;
     this.emailApiEndpoint = emailApiEndpoint;
@@ -72,28 +74,32 @@ public boolean notifyJobSuccess(final String sourceConnector, final String desti
     throw new NotImplementedException();
   }
 
+  // Once the configs are editable through the UI, the receiver email should be stored in
+  // airbyte-config/models/src/main/resources/types/CustomerioNotificationConfiguration.yaml
+  // instead of being passed in
   @Override
   public boolean notifyConnectionDisabled(final String receiverEmail,
                                           final String sourceConnector,
                                           final String destinationConnector,
                                           final String jobDescription,
-                                          final String logUrl)
+                                          final UUID workspaceId,
+                                          final UUID connectionId)
       throws IOException, InterruptedException {
-    final String requestBody = renderTemplate(AUTO_DISABLE_NOTIFICATION_TEMPLATE_PATH, TRANSACTION_MESSAGE_ID, SENDER_EMAIL, receiverEmail,
-        receiverEmail, sourceConnector, destinationConnector, jobDescription, logUrl);
+    final String requestBody = renderTemplate(AUTO_DISABLE_NOTIFICATION_TEMPLATE_PATH, AUTO_DISABLE_TRANSACTION_MESSAGE_ID, receiverEmail,
+        receiverEmail, sourceConnector, destinationConnector, jobDescription, workspaceId.toString(), connectionId.toString());
     return notifyByEmail(requestBody);
   }
 
   @Override
-  public boolean notifyConnectionDisableWarning(
-      final String receiverEmail,
+  public boolean notifyConnectionDisableWarning(final String receiverEmail, final String
sourceConnector, final String destinationConnector, final String jobDescription, - final String logUrl) + final UUID workspaceId, + final UUID connectionId) throws IOException, InterruptedException { - final String requestBody = renderTemplate(AUTO_DISABLE_WARNING_NOTIFICATION_TEMPLATE_PATH, TRANSACTION_MESSAGE_ID, SENDER_EMAIL, receiverEmail, - receiverEmail, sourceConnector, destinationConnector, jobDescription, logUrl); + final String requestBody = renderTemplate(AUTO_DISABLE_NOTIFICATION_TEMPLATE_PATH, AUTO_DISABLE_WARNING_TRANSACTION_MESSAGE_ID, receiverEmail, + receiverEmail, sourceConnector, destinationConnector, jobDescription, workspaceId.toString(), connectionId.toString()); return notifyByEmail(requestBody); } diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/NotificationClient.java b/airbyte-notification/src/main/java/io/airbyte/notification/NotificationClient.java index 65bd08b67c8fc..02b7465dc37db 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/NotificationClient.java +++ b/airbyte-notification/src/main/java/io/airbyte/notification/NotificationClient.java @@ -4,8 +4,10 @@ package io.airbyte.notification; +import io.airbyte.commons.resources.MoreResources; import io.airbyte.config.Notification; import java.io.IOException; +import java.util.UUID; public abstract class NotificationClient { @@ -35,14 +37,16 @@ public abstract boolean notifyConnectionDisabled(String receiverEmail, String sourceConnector, String destinationConnector, String jobDescription, - String logUrl) + UUID workspaceId, + UUID connectionId) throws IOException, InterruptedException; public abstract boolean notifyConnectionDisableWarning(String receiverEmail, String sourceConnector, String destinationConnector, String jobDescription, - String logUrl) + UUID workspaceId, + UUID connectionId) throws IOException, InterruptedException; public abstract boolean notifySuccess(String message) throws IOException, InterruptedException; @@ -52,9 +56,14 @@ public abstract boolean notifyConnectionDisableWarning(String receiverEmail, public static NotificationClient createNotificationClient(final Notification notification) { return switch (notification.getNotificationType()) { case SLACK -> new SlackNotificationClient(notification); - case CUSTOMERIO -> new CustomeriolNotificationClient(notification); + case CUSTOMERIO -> new CustomerioNotificationClient(notification); default -> throw new IllegalArgumentException("Unknown notification type:" + notification.getNotificationType()); }; } + String renderTemplate(final String templateFile, final String... 
data) throws IOException { + final String template = MoreResources.readResource(templateFile); + return String.format(template, data); + } + } diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java b/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java index 9f4837b646e84..9a48fda4c2ed7 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java +++ b/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java @@ -7,7 +7,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap.Builder; import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; import io.airbyte.config.Notification; import io.airbyte.config.SlackNotificationConfiguration; import java.io.IOException; @@ -15,6 +14,7 @@ import java.net.http.HttpClient; import java.net.http.HttpRequest; import java.net.http.HttpResponse; +import java.util.UUID; import org.apache.logging.log4j.util.Strings; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -46,7 +46,7 @@ public SlackNotificationClient(final Notification notification) { @Override public boolean notifyJobFailure(final String sourceConnector, final String destinationConnector, final String jobDescription, final String logUrl) throws IOException, InterruptedException { - return notifyFailure(renderJobData( + return notifyFailure(renderTemplate( "slack/failure_slack_notification_template.txt", sourceConnector, destinationConnector, @@ -57,7 +57,7 @@ public boolean notifyJobFailure(final String sourceConnector, final String desti @Override public boolean notifyJobSuccess(final String sourceConnector, final String destinationConnector, final String jobDescription, final String logUrl) throws IOException, InterruptedException { - return notifySuccess(renderJobData( + return notifySuccess(renderTemplate( "slack/success_slack_notification_template.txt", sourceConnector, destinationConnector, @@ -70,14 +70,16 @@ public boolean notifyConnectionDisabled(final String receiverEmail, final String sourceConnector, final String destinationConnector, final String jobDescription, - final String logUrl) + final UUID workspaceId, + final UUID connectionId) throws IOException, InterruptedException { - final String message = renderJobData( + final String message = renderTemplate( "slack/auto_disable_slack_notification_template.txt", sourceConnector, destinationConnector, jobDescription, - logUrl); + workspaceId.toString(), + connectionId.toString()); final String webhookUrl = config.getWebhook(); if (!Strings.isEmpty(webhookUrl)) { @@ -91,14 +93,16 @@ public boolean notifyConnectionDisableWarning(final String receiverEmail, final String sourceConnector, final String destinationConnector, final String jobDescription, - final String logUrl) + final UUID workspaceId, + final UUID connectionId) throws IOException, InterruptedException { - final String message = renderJobData( + final String message = renderTemplate( "slack/auto_disable_warning_slack_notification_template.txt", sourceConnector, destinationConnector, jobDescription, - logUrl); + workspaceId.toString(), + connectionId.toString()); final String webhookUrl = config.getWebhook(); if (!Strings.isEmpty(webhookUrl)) { @@ -107,16 +111,6 @@ public boolean notifyConnectionDisableWarning(final String receiverEmail, return false; } - private String renderJobData(final String templateFile, - final String sourceConnector, - 
final String destinationConnector, - final String jobDescription, - final String logUrl) - throws IOException { - final String template = MoreResources.readResource(templateFile); - return String.format(template, sourceConnector, destinationConnector, jobDescription, logUrl); - } - private boolean notify(final String message) throws IOException, InterruptedException { final ImmutableMap body = new Builder() .put("text", message) diff --git a/airbyte-notification/src/main/resources/customerio/auto_disable_notification_template.json b/airbyte-notification/src/main/resources/customerio/auto_disable_notification_template.json index 545d0b737f726..7cfd0bee4071d 100644 --- a/airbyte-notification/src/main/resources/customerio/auto_disable_notification_template.json +++ b/airbyte-notification/src/main/resources/customerio/auto_disable_notification_template.json @@ -1,19 +1,20 @@ { "transactional_message_id": "%s", - "from": "%s", - "subject": "Automatic Notification: Your Airbyte connection has been disabled", "to": "%s", "identifiers": { "email": "%s" }, "message_data": { - "email_title": "Automatic Notification: Connection Disabled", - "email_body": "Your connection from %s to %s was automatically disabled because it failed 100 times consecutively or has been failing for 14 days in a row.

Please address the failing issues to ensure your syncs continue to run. The most recent attempted %s You can access its logs here: %s.

If you need help with resolving your connection, reach out to Support in-app or by emailing cloud-support@airbyte.io."
+    "source": "%s",
+    "destination": "%s",
+    "job_description": "%s",
+    "workspace_id": "%s",
+    "connection_id": "%s"
   },
 
   "disable_message_retention": false,
   "send_to_unsubscribed": true,
-  "tracked": true,
+  "tracked": false,
   "queue_draft": false,
   "disable_css_preprocessing": true
 }
diff --git a/airbyte-notification/src/main/resources/customerio/auto_disable_warning_notification_template.json b/airbyte-notification/src/main/resources/customerio/auto_disable_warning_notification_template.json
deleted file mode 100644
index bdfde3a9e702c..0000000000000
--- a/airbyte-notification/src/main/resources/customerio/auto_disable_warning_notification_template.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-  "transactional_message_id": "%s",
-  "from": "%s",
-  "subject": "Warning: Your Airbyte connection will be disabled",
-  "to": "%s",
-  "identifiers": {
-    "email": "%s"
-  },
-  "message_data": {
-    "email_title": "Warning: Connection Failing",
-    "email_body": "Your connection from %s to %s is scheduled to be automatically disabled because it either failed 50 times consecutively or there were only failed jobs in the past 7 days. Once it has failed 100 times consecutively or has been failing for 14 days in a row, the connection will be automatically disabled.

Please address the failing issues to ensure your syncs continue to run. The most recent attempted %s You can access its logs here: %s.

If you need help with resolving your connection, reach out to Support in-app or by emailing cloud-support@airbyte.io." - }, - - "disable_message_retention": false, - "send_to_unsubscribed": true, - "tracked": true, - "queue_draft": false, - "disable_css_preprocessing": true -} diff --git a/airbyte-notification/src/main/resources/customerio/default_template.json b/airbyte-notification/src/main/resources/customerio/default_template.json new file mode 100644 index 0000000000000..d394666cd7634 --- /dev/null +++ b/airbyte-notification/src/main/resources/customerio/default_template.json @@ -0,0 +1,17 @@ +{ + "transactional_message_id": "%s", + "to": "%s", + "identifiers": { + "email": "%s" + }, + "message_data": { + "email_title": "%s", + "email_body": "%s" + }, + + "disable_message_retention": false, + "send_to_unsubscribed": true, + "tracked": true, + "queue_draft": false, + "disable_css_preprocessing": true +} diff --git a/airbyte-notification/src/main/resources/slack/auto_disable_slack_notification_template.txt b/airbyte-notification/src/main/resources/slack/auto_disable_slack_notification_template.txt index 011308b714a2c..132d03bbf6bec 100644 --- a/airbyte-notification/src/main/resources/slack/auto_disable_slack_notification_template.txt +++ b/airbyte-notification/src/main/resources/slack/auto_disable_slack_notification_template.txt @@ -1,3 +1,6 @@ Your connection from %s to %s was automatically disabled because it failed 100 times consecutively or has been failing for 14 days in a row. -Please address the failing issues to ensure your syncs continue to run. The most recent attempted %s You can access its logs here: %s. +Please address the failing issues to ensure your syncs continue to run. The most recent attempted %s + +Workspace ID: %s +Connection ID: %s diff --git a/airbyte-notification/src/main/resources/slack/auto_disable_warning_slack_notification_template.txt b/airbyte-notification/src/main/resources/slack/auto_disable_warning_slack_notification_template.txt index e2a1ce8da6d6a..ad325bfaf8f74 100644 --- a/airbyte-notification/src/main/resources/slack/auto_disable_warning_slack_notification_template.txt +++ b/airbyte-notification/src/main/resources/slack/auto_disable_warning_slack_notification_template.txt @@ -1,3 +1,6 @@ Your connection from %s to %s is scheduled to be automatically disabled because it either failed 50 times consecutively or there were only failed jobs in the past 7 days. Once it has failed 100 times consecutively or has been failing for 14 days in a row, the connection will be automatically disabled. -Please address the failing issues to ensure your syncs continue to run. The most recent attempted %s You can access its logs here: %s. +Please address the failing issues to ensure your syncs continue to run. 
The most recent attempted %s + +Workspace ID: %s +Connection ID: %s diff --git a/airbyte-notification/src/test/java/io/airbyte/notification/CustomerioNotificationClientTest.java b/airbyte-notification/src/test/java/io/airbyte/notification/CustomerioNotificationClientTest.java index d6f342ee5c956..f01cee0e7367f 100644 --- a/airbyte-notification/src/test/java/io/airbyte/notification/CustomerioNotificationClientTest.java +++ b/airbyte-notification/src/test/java/io/airbyte/notification/CustomerioNotificationClientTest.java @@ -26,6 +26,7 @@ class CustomerioNotificationClientTest { private static final String API_KEY = "api-key"; private static final String URI_BASE = "https://customer.io"; private static final UUID WORKSPACE_ID = UUID.randomUUID(); + private static final UUID CONNECTION_ID = UUID.randomUUID(); private static final StandardWorkspace WORKSPACE = new StandardWorkspace() .withWorkspaceId(WORKSPACE_ID) .withName("workspace-name") @@ -45,7 +46,7 @@ void setUp() { // this test does _not_ check the body of the request. @Test void notifyConnectionDisabled() throws IOException, InterruptedException { - final CustomeriolNotificationClient customeriolNotificationClient = new CustomeriolNotificationClient(new Notification() + final CustomerioNotificationClient customerioNotificationClient = new CustomerioNotificationClient(new Notification() .withNotificationType(NotificationType.CUSTOMERIO), API_KEY, URI_BASE, mHttpClient); final HttpRequest expectedRequest = HttpRequest.newBuilder() @@ -60,7 +61,8 @@ void notifyConnectionDisabled() throws IOException, InterruptedException { Mockito.when(httpResponse.statusCode()).thenReturn(200); final boolean result = - customeriolNotificationClient.notifyConnectionDisabled(WORKSPACE.getEmail(), RANDOM_INPUT, RANDOM_INPUT, RANDOM_INPUT, RANDOM_INPUT); + customerioNotificationClient.notifyConnectionDisabled(WORKSPACE.getEmail(), RANDOM_INPUT, RANDOM_INPUT, RANDOM_INPUT, WORKSPACE_ID, + CONNECTION_ID); Mockito.verify(mHttpClient).send(expectedRequest, HttpResponse.BodyHandlers.ofString()); assertTrue(result); diff --git a/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java b/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java index bbdeb356c652a..072a53eb48b28 100644 --- a/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java +++ b/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java @@ -33,6 +33,8 @@ public class SlackNotificationClientTest { private static final Logger LOGGER = LoggerFactory.getLogger(SlackNotificationClientTest.class); + private static final UUID WORKSPACE_ID = UUID.randomUUID(); + private static final UUID CONNECTION_ID = UUID.randomUUID(); public static final String WEBHOOK_URL = "http://localhost:"; private static final String EXPECTED_FAIL_MESSAGE = "Your connection from source-test to destination-test just failed...\n" @@ -120,12 +122,16 @@ void testNotifyJobSuccess() throws IOException, InterruptedException { @Test void testNotifyConnectionDisabled() throws IOException, InterruptedException { - final String expectedNotificationMessage = + final String expectedNotificationMessage = String.format( """ Your connection from source-test to destination-test was automatically disabled because it failed 100 times consecutively or has been failing for 14 days in a row. - Please address the failing issues to ensure your syncs continue to run. 
The most recent attempted job description You can access its logs here: logUrl. - """; + Please address the failing issues to ensure your syncs continue to run. The most recent attempted job description. + + Workspace ID: %s + Connection ID: %s + """, + WORKSPACE_ID, CONNECTION_ID); server.createContext("/test", new ServerHandler(expectedNotificationMessage)); final SlackNotificationClient client = @@ -133,17 +139,21 @@ void testNotifyConnectionDisabled() throws IOException, InterruptedException { .withNotificationType(NotificationType.SLACK) .withSendOnSuccess(true) .withSlackConfiguration(new SlackNotificationConfiguration().withWebhook(WEBHOOK_URL + server.getAddress().getPort() + "/test"))); - assertTrue(client.notifyConnectionDisabled("", "source-test", "destination-test", "job description", "logUrl")); + assertTrue(client.notifyConnectionDisabled("", "source-test", "destination-test", "job description.", WORKSPACE_ID, CONNECTION_ID)); } @Test void testNotifyConnectionDisabledWarning() throws IOException, InterruptedException { - final String expectedNotificationWarningMessage = + final String expectedNotificationWarningMessage = String.format( """ Your connection from source-test to destination-test is scheduled to be automatically disabled because it either failed 50 times consecutively or there were only failed jobs in the past 7 days. Once it has failed 100 times consecutively or has been failing for 14 days in a row, the connection will be automatically disabled. - Please address the failing issues to ensure your syncs continue to run. The most recent attempted job description You can access its logs here: logUrl. - """; + Please address the failing issues to ensure your syncs continue to run. The most recent attempted job description. + + Workspace ID: %s + Connection ID: %s + """, + WORKSPACE_ID, CONNECTION_ID); server.createContext("/test", new ServerHandler(expectedNotificationWarningMessage)); final SlackNotificationClient client = @@ -151,7 +161,7 @@ void testNotifyConnectionDisabledWarning() throws IOException, InterruptedExcept .withNotificationType(NotificationType.SLACK) .withSendOnSuccess(true) .withSlackConfiguration(new SlackNotificationConfiguration().withWebhook(WEBHOOK_URL + server.getAddress().getPort() + "/test"))); - assertTrue(client.notifyConnectionDisableWarning("", "source-test", "destination-test", "job description", "logUrl")); + assertTrue(client.notifyConnectionDisableWarning("", "source-test", "destination-test", "job description.", WORKSPACE_ID, CONNECTION_ID)); } static class ServerHandler implements HttpHandler { diff --git a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobNotifier.java b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobNotifier.java index b49236b9beecb..afeb83f52679c 100644 --- a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobNotifier.java +++ b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobNotifier.java @@ -118,13 +118,13 @@ private void notifyJob(final String reason, break; case CONNECTION_DISABLED_NOTIFICATION: if (!notificationClient.notifyConnectionDisabled(workspace.getEmail(), sourceConnector, destinationConnector, jobDescription, - logUrl)) { + workspaceId, connectionId)) { LOGGER.warn("Failed to successfully notify auto-disable connection: {}", notification); } break; case CONNECTION_DISABLED_WARNING_NOTIFICATION: if (!notificationClient.notifyConnectionDisableWarning(workspace.getEmail(), 
sourceConnector, destinationConnector, jobDescription, - logUrl)) { + workspaceId, connectionId)) { LOGGER.warn("Failed to successfully notify auto-disable connection warning: {}", notification); } } From d4f8b25b8e3e109db866352cf1dcec0d73c92cbd Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Mon, 9 May 2022 18:34:28 +0300 Subject: [PATCH 04/55] Source Google Ads: Improve unit and integration tests (#12651) * #12650 source Googel ads: tests * #12650 source google ads: add changelog item * #12650 source google ads: add comments to tests * auto-bump connector version Co-authored-by: Octavia Squidington III --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 2 +- .../connectors/source-google-ads/Dockerfile | 6 +- .../acceptance-test-config.yml | 4 +- .../integration_tests/abnormal_state.json | 2 +- .../integration_tests/test_incremental.py | 137 +++++----- .../connectors/source-google-ads/setup.py | 2 +- .../source_google_ads/custom_query_stream.py | 2 +- .../source_google_ads/streams.py | 8 +- .../source-google-ads/unit_tests/conftest.py | 8 + .../unit_tests/test_source.py | 239 +++++++++++------- docs/integrations/sources/google-ads.md | 3 +- 12 files changed, 238 insertions(+), 177 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index ee0a49db83557..416749b0f5ff9 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -311,7 +311,7 @@ - name: Google Ads sourceDefinitionId: 253487c0-2246-43ba-a21f-5116b20a2c50 dockerRepository: airbyte/source-google-ads - dockerImageTag: 0.1.36 + dockerImageTag: 0.1.37 documentationUrl: https://docs.airbyte.io/integrations/sources/google-ads icon: google-adwords.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index f1611f8874784..5c0fd5ebcea68 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -2688,7 +2688,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-google-ads:0.1.36" +- dockerImage: "airbyte/source-google-ads:0.1.37" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/google-ads" connectionSpecification: diff --git a/airbyte-integrations/connectors/source-google-ads/Dockerfile b/airbyte-integrations/connectors/source-google-ads/Dockerfile index 5d34b4e8a4a4c..2bba65c96c907 100644 --- a/airbyte-integrations/connectors/source-google-ads/Dockerfile +++ b/airbyte-integrations/connectors/source-google-ads/Dockerfile @@ -6,12 +6,12 @@ RUN apt-get update && apt-get install -y bash && rm -rf /var/lib/apt/lists/* ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" WORKDIR /airbyte/integration_code -COPY source_google_ads ./source_google_ads -COPY main.py ./ COPY setup.py ./ RUN pip install . 
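+# Installing dependencies before copying the connector source means code-only
+# changes no longer invalidate Docker's cached dependency layer.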
+COPY source_google_ads ./source_google_ads +COPY main.py ./ ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.36 +LABEL io.airbyte.version=0.1.37 LABEL io.airbyte.name=airbyte/source-google-ads diff --git a/airbyte-integrations/connectors/source-google-ads/acceptance-test-config.yml b/airbyte-integrations/connectors/source-google-ads/acceptance-test-config.yml index 96968da1853d3..f316cda4576b7 100644 --- a/airbyte-integrations/connectors/source-google-ads/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-google-ads/acceptance-test-config.yml @@ -28,7 +28,9 @@ tests: configured_catalog_path: "integration_tests/configured_catalog_protobuf_msg.json" # expect_records: # path: "integration_tests/expected_records_msg.txt" - # TODO incremental test is disabled because records output from the report streams can be up to 14 days older than the input state + # These tests are disabled because of the issues https://github.com/airbytehq/airbyte/issues/12665 + # and https://github.com/airbytehq/airbyte/issues/12467. Instead, custom integration tests are implemented. + # As soon as the above issues are resolved, standard SATs can be enabled and custom tests removed. # incremental: # - config_path: "secrets/config.json" # configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-google-ads/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-google-ads/integration_tests/abnormal_state.json index 5f7c2ed7bcaea..c9be4d524f3f2 100644 --- a/airbyte-integrations/connectors/source-google-ads/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-google-ads/integration_tests/abnormal_state.json @@ -1,5 +1,5 @@ { "ad_group_ad_report": { - "segments.date": "2021-06-07" + "segments.date": "2221-06-07" } } diff --git a/airbyte-integrations/connectors/source-google-ads/integration_tests/test_incremental.py b/airbyte-integrations/connectors/source-google-ads/integration_tests/test_incremental.py index 63687ec61bfef..75b1d2704bd30 100644 --- a/airbyte-integrations/connectors/source-google-ads/integration_tests/test_incremental.py +++ b/airbyte-integrations/connectors/source-google-ads/integration_tests/test_incremental.py @@ -3,93 +3,88 @@ # import pendulum +import pytest from airbyte_cdk.logger import AirbyteLogger from airbyte_cdk.models import ConfiguredAirbyteCatalog, Type from source_google_ads.source import SourceGoogleAds -SAMPLE_CATALOG = { - "streams": [ - { - "stream": { - "name": "ad_group_ad_report", - "json_schema": { - "type": "object", - "title": "Ad Group Ad Report", - "description": "An ad group ad.", - "properties": { - "accent_color": { - "description": "AccentColor", - "type": ["null", "string"], - "field": "ad_group_ad.ad.legacy_responsive_display_ad.accent_color", - }, - "account_currency_code": { - "description": "AccountCurrencyCode", - "type": ["null", "string"], - "field": "customer.currency_code", - }, - "account_descriptive_name": { - "description": "AccountDescriptiveName", - "type": ["null", "string"], - "field": "customer.descriptive_name", - }, - "segments.date": {"description": "Date", "type": ["null", "string"], "field": "segments.date"}, - }, + +@pytest.fixture +def configured_catalog(): + return { + "streams": [ + { + "stream": { + "name": "ad_group_ad_report", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": True, + "default_cursor_field": 
["segments.date"], }, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": True, - "default_cursor_field": ["segments.date"], - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite", - "cursor_field": ["segments.date"], - } - ] -} + "sync_mode": "incremental", + "destination_sync_mode": "overwrite", + "cursor_field": ["segments.date"], + } + ] + } + + +GAP_DAYS = 14 + +def test_incremental_sync(config, configured_catalog): + today = pendulum.now().date() + start_date = today.subtract(months=1) + config["start_date"] = start_date.to_date_string() -def test_incremental_sync(config): google_ads_client = SourceGoogleAds() - state = "2021-05-24" - records = google_ads_client.read( - AirbyteLogger(), config, ConfiguredAirbyteCatalog.parse_obj(SAMPLE_CATALOG), {"ad_group_ad_report": {"segments.date": state}} + records = list(google_ads_client.read(AirbyteLogger(), config, ConfiguredAirbyteCatalog.parse_obj(configured_catalog))) + latest_state = None + for record in records[::-1]: + if record and record.type == Type.STATE: + latest_state = record.state.data["ad_group_ad_report"][config["customer_id"]]["segments.date"] + break + + for message in records: + if not message or message.type != Type.RECORD: + continue + cursor_value = message.record.data["segments.date"] + assert cursor_value <= latest_state + assert cursor_value >= start_date.subtract(days=GAP_DAYS).to_date_string() + + # next sync + records = list( + google_ads_client.read( + AirbyteLogger(), + config, + ConfiguredAirbyteCatalog.parse_obj(configured_catalog), + {"ad_group_ad_report": {"segments.date": latest_state}}, + ) ) - current_state = pendulum.parse(state).subtract(days=14).to_date_string() for record in records: - if record and record.type == Type.STATE: - print(record) - temp_state = record.state.data["ad_group_ad_report"] - current_state = ( - temp_state[config["customer_id"]]["segments.date"] if temp_state.get(config["customer_id"]) else temp_state["segments.date"] - ) - if record and record.type == Type.RECORD: - assert record.record.data["segments.date"] >= current_state + if record.type == Type.RECORD: + assert record.record.data["segments.date"] >= pendulum.parse(latest_state).subtract(days=GAP_DAYS).to_date_string() + if record.type == Type.STATE: + assert record.state.data["ad_group_ad_report"][config["customer_id"]]["segments.date"] >= latest_state + - # Next sync - state = "2021-06-04" +def test_abnormally_large_state(config, configured_catalog): + google_ads_client = SourceGoogleAds() records = google_ads_client.read( - AirbyteLogger(), config, ConfiguredAirbyteCatalog.parse_obj(SAMPLE_CATALOG), {"ad_group_ad_report": {"segments.date": state}} + AirbyteLogger(), + config, + ConfiguredAirbyteCatalog.parse_obj(configured_catalog), + {"ad_group_ad_report": {"segments.date": "2222-06-04"}}, ) - current_state = pendulum.parse(state).subtract(days=14).to_date_string() + no_data_records = True + state_records = False for record in records: if record and record.type == Type.STATE: - current_state = record.state.data["ad_group_ad_report"][config["customer_id"]]["segments.date"] + state_records = True if record and record.type == Type.RECORD: - assert record.record.data["segments.date"] >= current_state - - # # Abnormal state - # This part of the test is broken need to understand what is causing this. 
- # state = "2029-06-04" - # records = google_ads_client.read( - # AirbyteLogger(), config, ConfiguredAirbyteCatalog.parse_obj(SAMPLE_CATALOG), {"ad_group_ad_report": {"segments.date": state}} - # ) - - # no_records = True - # for record in records: - # if record and record.type == Type.STATE: - # assert record.state.data["ad_group_ad_report"]["segments.date"] == state - # if record and record.type == Type.RECORD: - # no_records = False + no_data_records = False - # assert no_records + assert no_data_records + assert state_records diff --git a/airbyte-integrations/connectors/source-google-ads/setup.py b/airbyte-integrations/connectors/source-google-ads/setup.py index 2b828a38b74c1..8d02bb8faaafd 100644 --- a/airbyte-integrations/connectors/source-google-ads/setup.py +++ b/airbyte-integrations/connectors/source-google-ads/setup.py @@ -7,7 +7,7 @@ MAIN_REQUIREMENTS = ["airbyte-cdk~=0.1", "google-ads==14.1.0", "pendulum"] -TEST_REQUIREMENTS = ["pytest~=6.1", "pytest-mock", "freezegun"] +TEST_REQUIREMENTS = ["pytest~=6.1", "pytest-mock", "freezegun", "requests-mock"] setup( name="source_google_ads", diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/custom_query_stream.py b/airbyte-integrations/connectors/source-google-ads/source_google_ads/custom_query_stream.py index 5a6a0270e65d8..82b3a0e5447a2 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/custom_query_stream.py +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/custom_query_stream.py @@ -34,7 +34,7 @@ def get_query(self, stream_slice: Mapping[str, Any] = None) -> str: return self.insert_segments_date_expr(self.user_defined_query, start_date, end_date) # IncrementalGoogleAdsStream uses get_json_schema a lot while parsing - # responses, caching plaing crucial role for performance here. + # responses, caching playing crucial role for performance here. @lru_cache() def get_json_schema(self) -> Dict[str, Any]: """ diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/streams.py b/airbyte-integrations/connectors/source-google-ads/source_google_ads/streams.py index 43bbdd78c6e63..81b942b56b07c 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/streams.py +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/streams.py @@ -48,7 +48,7 @@ def chunk_date_range( days_of_data_storage: int = None, range_days: int = None, time_zone=None, -) -> Iterable[Mapping[str, any]]: +) -> Iterable[Optional[Mapping[str, any]]]: """ Passing optional parameter end_date for testing Returns a list of the beginning and ending timestamps of each `range_days` between the start date and now. 
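Taken together, the two changes in this file are easier to read as one flow. A condensed, runnable sketch (simplified from the diff; the conversion-window and range slicing are elided):

```python
from typing import Iterable, Mapping, Optional

import pendulum


def chunk_date_range(start_date: str, end_date: str = None) -> Iterable[Optional[Mapping[str, str]]]:
    """Simplified: when the saved state is ahead of 'now', yield a single None slice."""
    start = pendulum.parse(start_date)
    end = pendulum.parse(end_date) if end_date else pendulum.now()
    if start > end:  # abnormal state, e.g. a cursor date in the future
        return [None]
    # ...conversion-window and range_days slicing elided...
    return [{"start_date": start.to_date_string(), "end_date": end.to_date_string()}]


def read_records(stream_slice: Optional[Mapping[str, str]] = None) -> Iterable[Mapping[str, str]]:
    if not stream_slice:  # a None slice means: skip the API call, emit nothing
        return []
    return [{"segments.date": stream_slice["start_date"]}]


# A future-dated cursor now produces no records (state can still be emitted):
for stream_slice in chunk_date_range("2222-06-04"):
    print(list(read_records(stream_slice)))  # -> []
```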
@@ -64,7 +64,7 @@ def chunk_date_range( # As in to return some state when state in abnormal if start_date > end_date: - start_date = end_date + return [None] # applying conversion window start_date = start_date.subtract(days=conversion_window) @@ -99,7 +99,9 @@ def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Ite self._customer_id = customer_id yield {} - def read_records(self, sync_mode, stream_slice: Mapping[str, Any] = None, **kwargs) -> Iterable[Mapping[str, Any]]: + def read_records(self, sync_mode, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: + if not stream_slice: + return [] account_responses = self.google_ads_client.send_request(self.get_query(stream_slice), customer_id=self._customer_id) for response in account_responses: yield from self.parse_response(response) diff --git a/airbyte-integrations/connectors/source-google-ads/unit_tests/conftest.py b/airbyte-integrations/connectors/source-google-ads/unit_tests/conftest.py index d03c2820311d0..8287466b91bb0 100644 --- a/airbyte-integrations/connectors/source-google-ads/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-google-ads/unit_tests/conftest.py @@ -11,3 +11,11 @@ def config_fixture(): with open("secrets/config.json", "r") as config_file: return json.load(config_file) + + +@pytest.fixture(autouse=True) +def mock_oauth_call(requests_mock): + yield requests_mock.post( + "https://accounts.google.com/o/oauth2/token", + json={"access_token": "access_token", "refresh_token": "refresh_token", "expires_in": 0}, + ) diff --git a/airbyte-integrations/connectors/source-google-ads/unit_tests/test_source.py b/airbyte-integrations/connectors/source-google-ads/unit_tests/test_source.py index bf4bfa8543316..b13ff2336223b 100644 --- a/airbyte-integrations/connectors/source-google-ads/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-google-ads/unit_tests/test_source.py @@ -2,6 +2,9 @@ # Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
# +from collections import namedtuple +from unittest.mock import Mock + import pytest from airbyte_cdk import AirbyteLogger from freezegun import freeze_time @@ -14,7 +17,70 @@ from .common import MockErroringGoogleAdsClient, MockGoogleAdsClient -# Test chunck date range without end date +@pytest.fixture +def mock_account_info(mocker): + mocker.patch( + "source_google_ads.source.SourceGoogleAds.get_account_info", + Mock(return_value={"customer.manager": False, "customer.time_zone": "Europe/Berlin"}), + ) + + +@pytest.fixture() +def client_mock(config): + google_api = GoogleAds(credentials=config["credentials"], customer_id=config["customer_id"]) + client = AdGroupAdReport( + start_date=config["start_date"], api=google_api, conversion_window_days=config["conversion_window_days"], time_zone="local" + ) + client._customer_id = "1234567890" + return client + + +@pytest.fixture() +def mock_fields_meta_data(): + Node = namedtuple("Node", ["data_type", "name", "enum_values", "is_repeated"]) + nodes = ( + Node("RESOURCE_NAME", "campaign.accessible_bidding_strategy", [], False), + Node( + "ENUM", + "segments.ad_destination_type", + [ + "APP_DEEP_LINK", + "APP_STORE", + "LEAD_FORM", + "LOCATION_LISTING", + "MAP_DIRECTIONS", + "MESSAGE", + "NOT_APPLICABLE", + "PHONE_CALL", + "UNKNOWN", + "UNMODELED_FOR_CONVERSIONS", + "UNSPECIFIED", + "WEBSITE", + "YOUTUBE", + ], + False, + ), + Node("DATE", "campaign.start_date", [], is_repeated=False), + Node("DATE", "campaign.end_date", [], False), + Node("DATE", "segments.date", [], False), + Node( + "ENUM", + "accessible_bidding_strategy.target_impression_share.location", + ["ABSOLUTE_TOP_OF_PAGE", "ANYWHERE_ON_PAGE", "TOP_OF_PAGE", "UNKNOWN", "UNSPECIFIED"], + False, + ), + Node("STRING", "campaign.name", [], False), + Node("DOUBLE", "campaign.optimization_score", [], False), + Node("RESOURCE_NAME", "campaign.resource_name", [], False), + Node("INT32", "campaign.shopping_setting.campaign_priority", [], False), + Node("INT64", "campaign.shopping_setting.merchant_id", [], False), + Node("BOOLEAN", "campaign_budget.explicitly_shared", [], False), + Node("MESSAGE", "bidding_strategy.enhanced_cpc", [], False), + ) + return Mock(get_fields_metadata=Mock(return_value={node.name: node for node in nodes})) + + +# Test chunk date range without end date @freeze_time("2022-01-30") def test_chunk_date_range_without_end_date(): start_date_str = "2022-01-24" @@ -52,39 +118,33 @@ def test_chunk_date_range(): ] == response -def test_streams_count(config): +def test_streams_count(config, mock_account_info): source = SourceGoogleAds() streams = source.streams(config) expected_streams_number = 19 assert len(streams) == expected_streams_number -def test_non_manager_account(): - mock_account_info = {"customer.manager": False} - source = SourceGoogleAds() - is_manager_account = source.is_manager_account(mock_account_info) - assert not is_manager_account - - -def test_manager_account(): - mock_account_info = {"customer.manager": True} +@pytest.mark.parametrize("is_manager_account", (True, False)) +def test_manager_account(is_manager_account): + mock_account_info = {"customer.manager": is_manager_account} source = SourceGoogleAds() - is_manager_account = source.is_manager_account(mock_account_info) - assert is_manager_account - + assert source.is_manager_account(mock_account_info) is is_manager_account -def test_metrics_in_custom_query(): - mock_query = "SELECT customer.id, metrics.conversions, campaign.start_date FROM campaign" - source = SourceGoogleAds() - is_metrics_in_custom_query 
= source.is_metrics_in_custom_query(mock_query) - assert is_metrics_in_custom_query - -def test_metrics_not_in_custom_query(): - mock_query = "SELECT segments.ad_destination_type, campaign.start_date, campaign.end_date FROM campaign" +@pytest.mark.parametrize( + ( + "query", + "is_metrics_in_query", + ), + ( + ("SELECT customer.id, metrics.conversions, campaign.start_date FROM campaign", True), + ("SELECT segments.ad_destination_type, campaign.start_date, campaign.end_date FROM campaign", False), + ), +) +def test_metrics_in_custom_query(query, is_metrics_in_query): source = SourceGoogleAds() - is_metrics_in_custom_query = source.is_metrics_in_custom_query(mock_query) - assert not is_metrics_in_custom_query + assert source.is_metrics_in_custom_query(query) is is_metrics_in_query def test_time_zone(): @@ -94,59 +154,33 @@ def test_time_zone(): assert time_zone == "local" -# this requires the config because instantiating a stream creates a google client. TODO refactor so client can be mocked. -def test_get_updated_state(config): - google_api = GoogleAds(credentials=config["credentials"], customer_id=config["customer_id"]) - client = AdGroupAdReport( - start_date=config["start_date"], api=google_api, conversion_window_days=config["conversion_window_days"], time_zone="local" - ) - client._customer_id = "1234567890" - +def test_get_updated_state(client_mock): current_state_stream = {} latest_record = {"segments.date": "2020-01-01"} - new_stream_state = client.get_updated_state(current_state_stream, latest_record) + new_stream_state = client_mock.get_updated_state(current_state_stream, latest_record) assert new_stream_state == {"1234567890": {"segments.date": "2020-01-01"}} current_state_stream = {"segments.date": "2020-01-01"} latest_record = {"segments.date": "2020-02-01"} - new_stream_state = client.get_updated_state(current_state_stream, latest_record) + new_stream_state = client_mock.get_updated_state(current_state_stream, latest_record) assert new_stream_state == {"1234567890": {"segments.date": "2020-02-01"}} current_state_stream = {"1234567890": {"segments.date": "2020-02-01"}} latest_record = {"segments.date": "2021-03-03"} - new_stream_state = client.get_updated_state(current_state_stream, latest_record) + new_stream_state = client_mock.get_updated_state(current_state_stream, latest_record) assert new_stream_state == {"1234567890": {"segments.date": "2021-03-03"}} -def get_instance_from_config(config, query): +def stream_instance(query, api_mock, **kwargs): start_date = "2021-03-04" conversion_window_days = 14 - google_api = GoogleAds(credentials=config["credentials"], customer_id=config["customer_id"]) - instance = CustomQuery( - api=google_api, + api=api_mock, conversion_window_days=conversion_window_days, start_date=start_date, - custom_query_config={"query": query, "table_name": "whatever_table"}, - time_zone="local", - ) - return instance - - -# get he instance with a config -def get_instance_from_config_with_end_date(config, query): - start_date = "2021-03-04" - end_date = "2021-04-04" - conversion_window_days = 14 - google_api = GoogleAds(credentials=config["credentials"], customer_id=config["customer_id"]) - - instance = CustomQuery( - api=google_api, - conversion_window_days=conversion_window_days, - start_date=start_date, - end_date=end_date, time_zone="local", custom_query_config={"query": query, "table_name": "whatever_table"}, + **kwargs, ) return instance @@ -156,25 +190,26 @@ def get_instance_from_config_with_end_date(config, query): [ ( """ - SELecT +SELECT campaign.id, 
campaign.name, campaign.status, - metrics.impressions FROM campaign -wheRe campaign.status = 'PAUSED' + metrics.impressions +FROM campaign +WHERE campaign.status = 'PAUSED' AND metrics.impressions > 100 -order by campaign.status +ORDER BY campaign.status """, ["campaign.id", "campaign.name", "campaign.status", "metrics.impressions"], ), ( """ - SELECT - campaign.accessible_bidding_strategy, - segments.ad_destination_type, - campaign.start_date, - campaign.end_date - FROM campaign +SELECT + campaign.accessible_bidding_strategy, + segments.ad_destination_type, + campaign.start_date, + campaign.end_date +FROM campaign """, ["campaign.accessible_bidding_strategy", "segments.ad_destination_type", "campaign.start_date", "campaign.end_date"], ), @@ -190,40 +225,42 @@ def test_get_query_fields(query, fields): [ ( """ -SELect +SELECT campaign.id, campaign.name, campaign.status, - metrics.impressions FROM campaign -wheRe campaign.status = 'PAUSED' + metrics.impressions +FROM campaign +WHERE campaign.status = 'PAUSED' AND metrics.impressions > 100 -order by campaign.status +ORDER BY campaign.status """, """ -SELect +SELECT campaign.id, campaign.name, campaign.status, - metrics.impressions , segments.date + metrics.impressions +, segments.date FROM campaign -wheRe campaign.status = 'PAUSED' +WHERE campaign.status = 'PAUSED' AND metrics.impressions > 100 AND segments.date BETWEEN '1980-01-01' AND '2000-01-01' -order by campaign.status +ORDER BY campaign.status """, ), ( """ -SELect +SELECT campaign.id, campaign.name, campaign.status, metrics.impressions FROM campaign -order by campaign.status +ORDER BY campaign.status """, """ -SELect +SELECT campaign.id, campaign.name, campaign.status, @@ -232,35 +269,51 @@ def test_get_query_fields(query, fields): FROM campaign WHERE segments.date BETWEEN '1980-01-01' AND '2000-01-01' -order by campaign.status +ORDER BY campaign.status """, ), ( """ -SELect +SELECT campaign.id, campaign.name, campaign.status, - metrics.impressions FROM campaign -wheRe campaign.status = 'PAUSED' + metrics.impressions +FROM campaign +WHERE campaign.status = 'PAUSED' AND metrics.impressions > 100 """, """ -SELect +SELECT campaign.id, campaign.name, campaign.status, - metrics.impressions , segments.date + metrics.impressions +, segments.date FROM campaign -wheRe campaign.status = 'PAUSED' +WHERE campaign.status = 'PAUSED' AND metrics.impressions > 100 AND segments.date BETWEEN '1980-01-01' AND '2000-01-01' """, ), ( - "SELECT campaign.accessible_bidding_strategy, segments.ad_destination_type, campaign.start_date, campaign.end_date FROM campaign", - """SELECT campaign.accessible_bidding_strategy, segments.ad_destination_type, campaign.start_date, campaign.end_date , segments.date + """ +SELECT + campaign.accessible_bidding_strategy, + segments.ad_destination_type, + campaign.start_date, + campaign.end_date +FROM campaign +""", + """ +SELECT + campaign.accessible_bidding_strategy, + segments.ad_destination_type, + campaign.start_date, + campaign.end_date +, segments.date FROM campaign + WHERE segments.date BETWEEN '1980-01-01' AND '2000-01-01' """, ), @@ -270,7 +323,7 @@ def test_insert_date(original_query, expected_query): assert CustomQuery.insert_segments_date_expr(original_query, "1980-01-01", "2000-01-01") == expected_query -def test_get_json_schema_parse_query(config): +def test_get_json_schema_parse_query(mock_fields_meta_data): query = """ SELECT campaign.accessible_bidding_strategy, @@ -287,14 +340,14 @@ def test_get_json_schema_parse_query(config): "segments.date", ] - instance = 
get_instance_from_config(config=config, query=query) + instance = stream_instance(query=query, api_mock=mock_fields_meta_data) final_schema = instance.get_json_schema() schema_keys = final_schema["properties"] assert set(schema_keys) == set(final_fields) # test 1 # Test get json schema when start and end date are provided in the config file -def test_get_json_schema_parse_query_with_end_date(config): +def test_get_json_schema_parse_query_with_end_date(mock_fields_meta_data): query = """ SELECT campaign.accessible_bidding_strategy, @@ -311,13 +364,13 @@ def test_get_json_schema_parse_query_with_end_date(config): "segments.date", ] - instance = get_instance_from_config_with_end_date(config=config, query=query) + instance = stream_instance(query=query, api_mock=mock_fields_meta_data, end_date="2021-04-04") final_schema = instance.get_json_schema() schema_keys = final_schema["properties"] assert set(schema_keys) == set(final_fields) # test 1 -def test_google_type_conversion(config): +def test_google_type_conversion(mock_fields_meta_data): """ query may be invalid (fields incompatibility did not checked). But we are just testing types, without submitting the query and further steps. @@ -350,7 +403,7 @@ def test_google_type_conversion(config): bidding_strategy.enhanced_cpc FROM campaign """ - instance = get_instance_from_config(config=config, query=query) + instance = stream_instance(query=query, api_mock=mock_fields_meta_data) final_schema = instance.get_json_schema() schema_properties = final_schema.get("properties") for prop, value in schema_properties.items(): diff --git a/docs/integrations/sources/google-ads.md b/docs/integrations/sources/google-ads.md index c6c2b1a38deaf..d038f0023c142 100644 --- a/docs/integrations/sources/google-ads.md +++ b/docs/integrations/sources/google-ads.md @@ -108,7 +108,8 @@ This source is constrained by whatever API limits are set for the Google Ads tha ## CHANGELOG | Version | Date | Pull Request | Subject | -| :------- | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------------------------------- | +| :------- | :--------- |:---------------------------------------------------------|:---------------------------------------------------------------------------------------------| +| `0.1.37` | 2022-05-06 | [12651](https://github.com/airbytehq/airbyte/pull/12651) | Improve integration and unit tests | | `0.1.36` | 2022-04-19 | [12158](https://github.com/airbytehq/airbyte/pull/12158) | Fix `*_labels` streams data type | | `0.1.35` | 2022-04-18 | [9310](https://github.com/airbytehq/airbyte/pull/9310) | Add new fields to reports | | `0.1.34` | 2022-03-29 | [11602](https://github.com/airbytehq/airbyte/pull/11602) | Add budget amount to campaigns stream. | From 39a9f2b9cdecbc98dde029fbf6f8dec3bf55c555 Mon Sep 17 00:00:00 2001 From: Topher Lubaway Date: Mon, 9 May 2022 10:47:25 -0500 Subject: [PATCH 05/55] Bump ejs from 3.1.6 to 3.1.7 in /airbyte-webapp (#12612) Bumps [ejs](https://github.com/mde/ejs) from 3.1.6 to 3.1.7. - [Release notes](https://github.com/mde/ejs/releases) - [Changelog](https://github.com/mde/ejs/blob/main/CHANGELOG.md) - [Commits](https://github.com/mde/ejs/compare/v3.1.6...v3.1.7) --- updated-dependencies: - dependency-name: ejs dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- airbyte-webapp/package-lock.json | 226 ++++++++++++++++++++++++++----- 1 file changed, 193 insertions(+), 33 deletions(-) diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index 5403f395620ce..6b881203f2122 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -20069,12 +20069,12 @@ "dev": true }, "node_modules/ejs": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.6.tgz", - "integrity": "sha512-9lt9Zse4hPucPkoP7FHDF0LQAlGyF9JVpnClFLFH3aSSbxmyoqINRpp/9wePWJTUl4KOQwRL72Iw3InHPDkoGw==", + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.7.tgz", + "integrity": "sha512-BIar7R6abbUxDA3bfXrO4DSgwo8I+fB5/1zgujl3HLLjwd6+9iOnrT+t3grn2qbk9vOgBubXOFwX2m9axoFaGw==", "dev": true, "dependencies": { - "jake": "^10.6.1" + "jake": "^10.8.5" }, "bin": { "ejs": "bin/cli.js" @@ -22076,12 +22076,33 @@ "optional": true }, "node_modules/filelist": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.2.tgz", - "integrity": "sha512-z7O0IS8Plc39rTCq6i6iHxk43duYOn8uFJiWSewIq0Bww1RNybVHSCjahmcC87ZqAm4OTvFzlzeGu3XAzG1ctQ==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.3.tgz", + "integrity": "sha512-LwjCsruLWQULGYKy7TX0OPtrL9kLpojOFKc5VCTxdFTV7w5zbsgqVKfnkKG7Qgjtq50gKfO56hJv88OfcGb70Q==", "dev": true, "dependencies": { - "minimatch": "^3.0.4" + "minimatch": "^5.0.1" + } + }, + "node_modules/filelist/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/filelist/node_modules/minimatch": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", + "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" } }, "node_modules/filesize": { @@ -25139,13 +25160,13 @@ } }, "node_modules/jake": { - "version": "10.8.2", - "resolved": "https://registry.npmjs.org/jake/-/jake-10.8.2.tgz", - "integrity": "sha512-eLpKyrfG3mzvGE2Du8VoPbeSkRry093+tyNjdYaBbJS9v17knImYGNXQCUV0gLxQtF82m3E8iRb/wdSQZLoq7A==", + "version": "10.8.5", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.8.5.tgz", + "integrity": "sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw==", "dev": true, "dependencies": { - "async": "0.9.x", - "chalk": "^2.4.2", + "async": "^3.2.3", + "chalk": "^4.0.2", "filelist": "^1.0.1", "minimatch": "^3.0.4" }, @@ -25153,15 +25174,85 @@ "jake": "bin/cli.js" }, "engines": { - "node": "*" + "node": ">=10" + } + }, + "node_modules/jake/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, 
"node_modules/jake/node_modules/async": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz", - "integrity": "sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0=", + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.3.tgz", + "integrity": "sha512-spZRyzKL5l5BZQrr/6m/SqFdBN0q3OCI0f9rjfBzCMBIP4p75P620rR3gTmaksNOhmzgdxcaxdNfMy6anrbM0g==", "dev": true }, + "node_modules/jake/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jake/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jake/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jake/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jake/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/jest": { "version": "27.4.7", "resolved": "https://registry.npmjs.org/jest/-/jest-27.4.7.tgz", @@ -60298,12 +60389,12 @@ "dev": true }, "ejs": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.6.tgz", - "integrity": "sha512-9lt9Zse4hPucPkoP7FHDF0LQAlGyF9JVpnClFLFH3aSSbxmyoqINRpp/9wePWJTUl4KOQwRL72Iw3InHPDkoGw==", + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.7.tgz", + "integrity": "sha512-BIar7R6abbUxDA3bfXrO4DSgwo8I+fB5/1zgujl3HLLjwd6+9iOnrT+t3grn2qbk9vOgBubXOFwX2m9axoFaGw==", "dev": true, "requires": { - "jake": "^10.6.1" + "jake": "^10.8.5" } }, "electron-to-chromium": { @@ -61851,12 +61942,32 @@ "optional": true }, "filelist": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.2.tgz", - "integrity": "sha512-z7O0IS8Plc39rTCq6i6iHxk43duYOn8uFJiWSewIq0Bww1RNybVHSCjahmcC87ZqAm4OTvFzlzeGu3XAzG1ctQ==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.3.tgz", + "integrity": "sha512-LwjCsruLWQULGYKy7TX0OPtrL9kLpojOFKc5VCTxdFTV7w5zbsgqVKfnkKG7Qgjtq50gKfO56hJv88OfcGb70Q==", "dev": true, "requires": { - "minimatch": "^3.0.4" + "minimatch": "^5.0.1" + }, + "dependencies": { + "brace-expansion": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0" + } + }, + "minimatch": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", + "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", + "dev": true, + "requires": { + "brace-expansion": "^2.0.1" + } + } } }, "filesize": { @@ -64163,22 +64274,71 @@ } }, "jake": { - "version": "10.8.2", - "resolved": "https://registry.npmjs.org/jake/-/jake-10.8.2.tgz", - "integrity": "sha512-eLpKyrfG3mzvGE2Du8VoPbeSkRry093+tyNjdYaBbJS9v17knImYGNXQCUV0gLxQtF82m3E8iRb/wdSQZLoq7A==", + "version": "10.8.5", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.8.5.tgz", + "integrity": "sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw==", "dev": true, "requires": { - "async": "0.9.x", - "chalk": "^2.4.2", + "async": "^3.2.3", + "chalk": "^4.0.2", "filelist": "^1.0.1", "minimatch": "^3.0.4" }, "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, "async": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz", - "integrity": "sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0=", + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.3.tgz", + "integrity": "sha512-spZRyzKL5l5BZQrr/6m/SqFdBN0q3OCI0f9rjfBzCMBIP4p75P620rR3gTmaksNOhmzgdxcaxdNfMy6anrbM0g==", "dev": true + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } } } }, From b9b75f387ec634860ea9c8d902a0913f94f7bfd8 Mon Sep 17 00:00:00 2001 From: Octavia Squidington III <90398440+octavia-squidington-iii@users.noreply.github.com> Date: Mon, 9 May 2022 13:01:54 -0300 Subject: [PATCH 06/55] Bump Airbyte version from 
0.37.0-alpha to 0.37.1-alpha (#12703) Co-authored-by: terencecho --- .bumpversion.cfg | 2 +- .env | 2 +- airbyte-bootloader/Dockerfile | 2 +- airbyte-container-orchestrator/Dockerfile | 2 +- airbyte-metrics/reporter/Dockerfile | 2 +- airbyte-scheduler/app/Dockerfile | 2 +- airbyte-server/Dockerfile | 2 +- airbyte-webapp/package-lock.json | 4 ++-- airbyte-webapp/package.json | 2 +- airbyte-workers/Dockerfile | 2 +- charts/airbyte/Chart.yaml | 2 +- charts/airbyte/README.md | 10 +++++----- charts/airbyte/values.yaml | 10 +++++----- docs/operator-guides/upgrading-airbyte.md | 2 +- kube/overlays/stable-with-resource-limits/.env | 2 +- .../stable-with-resource-limits/kustomization.yaml | 12 ++++++------ kube/overlays/stable/.env | 2 +- kube/overlays/stable/kustomization.yaml | 12 ++++++------ octavia-cli/Dockerfile | 2 +- octavia-cli/README.md | 2 +- octavia-cli/install.sh | 2 +- octavia-cli/setup.py | 2 +- 22 files changed, 41 insertions(+), 41 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 7b5c34babbbb6..2989b05fd76dd 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.37.0-alpha +current_version = 0.37.1-alpha commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? diff --git a/.env b/.env index 69a2d6ccaac49..7419c1d23c533 100644 --- a/.env +++ b/.env @@ -10,7 +10,7 @@ ### SHARED ### -VERSION=0.37.0-alpha +VERSION=0.37.1-alpha # When using the airbyte-db via default docker image CONFIG_ROOT=/data diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index ff03500e09465..014db939009ba 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim -ARG VERSION=0.37.0-alpha +ARG VERSION=0.37.1-alpha ENV APPLICATION airbyte-bootloader ENV VERSION ${VERSION} diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index 20602dc431ecf..01d2c29f4f31b 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -26,7 +26,7 @@ RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] htt RUN apt-get update && apt-get install -y kubectl # Don't change this manually. 
Bump version expects to make moves based on this string -ARG VERSION=0.37.0-alpha +ARG VERSION=0.37.1-alpha ENV APPLICATION airbyte-container-orchestrator ENV VERSION=${VERSION} diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index f7b0f0acfde6c..ff0f2e6f9e035 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS metrics-reporter -ARG VERSION=0.37.0-alpha +ARG VERSION=0.37.1-alpha ENV APPLICATION airbyte-metrics-reporter ENV VERSION ${VERSION} diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile index cbb20fa93bcbd..e7ec7006c7099 100644 --- a/airbyte-scheduler/app/Dockerfile +++ b/airbyte-scheduler/app/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS scheduler -ARG VERSION=0.37.0-alpha +ARG VERSION=0.37.1-alpha ENV APPLICATION airbyte-scheduler ENV VERSION ${VERSION} diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index d26b365461920..ded7fcf0492ea 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -3,7 +3,7 @@ FROM openjdk:${JDK_VERSION}-slim AS server EXPOSE 8000 -ARG VERSION=0.37.0-alpha +ARG VERSION=0.37.1-alpha ENV APPLICATION airbyte-server ENV VERSION ${VERSION} diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index 6b881203f2122..bc56a01f14f4b 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.37.0-alpha", + "version": "0.37.1-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.37.0-alpha", + "version": "0.37.1-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^6.1.1", "@fortawesome/free-brands-svg-icons": "^6.1.1", diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index f358e52110b71..06352b6f969b8 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.37.0-alpha", + "version": "0.37.1-alpha", "private": true, "engines": { "node": ">=16.0.0" diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile index 1aa68bdffe93a..5fa2a01b23084 100644 --- a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -25,7 +25,7 @@ RUN curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packa RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list RUN apt-get update && apt-get install -y kubectl -ARG VERSION=0.37.0-alpha +ARG VERSION=0.37.1-alpha ENV APPLICATION airbyte-workers ENV VERSION ${VERSION} diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index 08ef09f6e233f..1df7f1b1568f5 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -21,7 +21,7 @@ version: 0.3.2 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. 
-appVersion: "0.37.0-alpha" +appVersion: "0.37.1-alpha" dependencies: - name: common diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index ea7994a354fc8..3f9bfef69a996 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -30,7 +30,7 @@ Helm charts for Airbyte. | `webapp.replicaCount` | Number of webapp replicas | `1` | | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` | | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` | -| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.37.0-alpha` | +| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.37.1-alpha` | | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` | | `webapp.containerSecurityContext` | Security context for the container | `{}` | | `webapp.livenessProbe.enabled` | Enable livenessProbe on the webapp | `true` | @@ -72,7 +72,7 @@ Helm charts for Airbyte. | `scheduler.replicaCount` | Number of scheduler replicas | `1` | | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` | | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | -| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.37.0-alpha` | +| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.37.1-alpha` | | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | | `scheduler.containerSecurityContext` | Security context for the container | `{}` | | `scheduler.livenessProbe.enabled` | Enable livenessProbe on the scheduler | `true` | @@ -135,7 +135,7 @@ Helm charts for Airbyte. | `server.replicaCount` | Number of server replicas | `1` | | `server.image.repository` | The repository to use for the airbyte server image. | `airbyte/server` | | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` | -| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.37.0-alpha` | +| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.37.1-alpha` | | `server.podAnnotations` | Add extra annotations to the server pod | `{}` | | `server.containerSecurityContext` | Security context for the container | `{}` | | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` | @@ -170,7 +170,7 @@ Helm charts for Airbyte. | `worker.replicaCount` | Number of worker replicas | `1` | | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` | | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` | -| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.37.0-alpha` | +| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.37.1-alpha` | | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` | | `worker.containerSecurityContext` | Security context for the container | `{}` | | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` | @@ -202,7 +202,7 @@ Helm charts for Airbyte. 
| ----------------------------- | -------------------------------------------------------------------- | -------------------- | | `bootloader.image.repository` | The repository to use for the airbyte bootloader image. | `airbyte/bootloader` | | `bootloader.image.pullPolicy` | the pull policy to use for the airbyte bootloader image | `IfNotPresent` | -| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.37.0-alpha` | +| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.37.1-alpha` | | `bootloader.podAnnotations` | Add extra annotations to the bootloader pod | `{}` | | `bootloader.nodeSelector` | Node labels for pod assignment | `{}` | | `bootloader.tolerations` | Tolerations for worker pod assignment. | `[]` | diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index 2377d363eabd0..19ba227aad67e 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -41,7 +41,7 @@ webapp: image: repository: airbyte/webapp pullPolicy: IfNotPresent - tag: 0.37.0-alpha + tag: 0.37.1-alpha ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s) ## @@ -207,7 +207,7 @@ scheduler: image: repository: airbyte/scheduler pullPolicy: IfNotPresent - tag: 0.37.0-alpha + tag: 0.37.1-alpha ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod ## @@ -438,7 +438,7 @@ server: image: repository: airbyte/server pullPolicy: IfNotPresent - tag: 0.37.0-alpha + tag: 0.37.1-alpha ## @param server.podAnnotations [object] Add extra annotations to the server pod ## @@ -565,7 +565,7 @@ worker: image: repository: airbyte/worker pullPolicy: IfNotPresent - tag: 0.37.0-alpha + tag: 0.37.1-alpha ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s) ## @@ -683,7 +683,7 @@ bootloader: image: repository: airbyte/bootloader pullPolicy: IfNotPresent - tag: 0.37.0-alpha + tag: 0.37.1-alpha ## @param bootloader.podAnnotations [object] Add extra annotations to the bootloader pod ## diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md index 9de4ebe6b3c51..40f623d854e56 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/operator-guides/upgrading-airbyte.md @@ -103,7 +103,7 @@ If you are upgrading from (i.e. your current version of Airbyte is) Airbyte vers Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`. 
```bash - docker run --rm -v /tmp:/config airbyte/migration:0.37.0-alpha --\ + docker run --rm -v /tmp:/config airbyte/migration:0.37.1-alpha --\ --input /config/airbyte_archive.tar.gz\ --output /config/airbyte_archive_migrated.tar.gz ``` diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env index b1d44cd369222..7f9beea2321fe 100644 --- a/kube/overlays/stable-with-resource-limits/.env +++ b/kube/overlays/stable-with-resource-limits/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.37.0-alpha +AIRBYTE_VERSION=0.37.1-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml index c3e6ee2e5bec8..89e66d5d5bc14 100644 --- a/kube/overlays/stable-with-resource-limits/kustomization.yaml +++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.37.0-alpha + newTag: 0.37.1-alpha - name: airbyte/bootloader - newTag: 0.37.0-alpha + newTag: 0.37.1-alpha - name: airbyte/scheduler - newTag: 0.37.0-alpha + newTag: 0.37.1-alpha - name: airbyte/server - newTag: 0.37.0-alpha + newTag: 0.37.1-alpha - name: airbyte/webapp - newTag: 0.37.0-alpha + newTag: 0.37.1-alpha - name: airbyte/worker - newTag: 0.37.0-alpha + newTag: 0.37.1-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env index 73cc2fd5ff721..a84d36649409b 100644 --- a/kube/overlays/stable/.env +++ b/kube/overlays/stable/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.37.0-alpha +AIRBYTE_VERSION=0.37.1-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml index 2e51988f14409..d512effa07a14 100644 --- a/kube/overlays/stable/kustomization.yaml +++ b/kube/overlays/stable/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.37.0-alpha + newTag: 0.37.1-alpha - name: airbyte/bootloader - newTag: 0.37.0-alpha + newTag: 0.37.1-alpha - name: airbyte/scheduler - newTag: 0.37.0-alpha + newTag: 0.37.1-alpha - name: airbyte/server - newTag: 0.37.0-alpha + newTag: 0.37.1-alpha - name: airbyte/webapp - newTag: 0.37.0-alpha + newTag: 0.37.1-alpha - name: airbyte/worker - newTag: 0.37.0-alpha + newTag: 0.37.1-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/octavia-cli/Dockerfile b/octavia-cli/Dockerfile index d6a218cdd60a0..30f7fae5c4367 100644 --- a/octavia-cli/Dockerfile +++ b/octavia-cli/Dockerfile @@ -14,5 +14,5 @@ USER octavia-cli WORKDIR /home/octavia-project ENTRYPOINT ["octavia"] -LABEL io.airbyte.version=0.37.0-alpha +LABEL io.airbyte.version=0.37.1-alpha LABEL io.airbyte.name=airbyte/octavia-cli diff --git a/octavia-cli/README.md b/octavia-cli/README.md index cff94fd12363d..ecb5fdd3ca544 100644 --- a/octavia-cli/README.md +++ b/octavia-cli/README.md @@ -105,7 +105,7 @@ This script: ```bash touch ~/.octavia # Create a file to store env variables that will be mapped the octavia-cli container mkdir my_octavia_project_directory # Create your octavia project directory where YAML configurations will be stored. 
-docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.37.0-alpha +docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.37.1-alpha ``` ### Using `docker-compose` diff --git a/octavia-cli/install.sh b/octavia-cli/install.sh index bcc8c64326447..3a80c50568e8b 100755 --- a/octavia-cli/install.sh +++ b/octavia-cli/install.sh @@ -3,7 +3,7 @@ # This install scripts currently only works for ZSH and Bash profiles. # It creates an octavia alias in your profile bound to a docker run command and your current user. -VERSION=0.37.0-alpha +VERSION=0.37.1-alpha OCTAVIA_ENV_FILE=${HOME}/.octavia detect_profile() { diff --git a/octavia-cli/setup.py b/octavia-cli/setup.py index 5da5665f07de0..1955ad7271293 100644 --- a/octavia-cli/setup.py +++ b/octavia-cli/setup.py @@ -15,7 +15,7 @@ setup( name="octavia-cli", - version="0.37.0", + version="0.37.1", description="A command line interface to manage Airbyte configurations", long_description=README, author="Airbyte", From ebb9f3e1acbeef9aa34deecc7aa59a6966beeb06 Mon Sep 17 00:00:00 2001 From: Jonathan Pearlin Date: Mon, 9 May 2022 15:26:54 -0400 Subject: [PATCH 07/55] Prepare Database Access Layer for Dependency Injection (#12546) * Prepare database access objects for dependency injection * Replace duplicate code * Remove unused imports * Remove redundant validation call * Remove unused imports * Use constants * Disable fast fail during connection pool initialization * Remove typo * Add missing test dependency * Add missing test dependency * Add missing test dependency * Fix issue caused by rebase * Add method for cloud * Autoclose DSL context during migration * Better connection close handling * Fix typo in dependency * Fix SpotBugs issue * React to rebase * Fix typo * Update JavaDoc * Fix database close calls * Pass configs to getServer * Fix typo * Fix call to removed method * Fix typo * Use catalog to manage versions * PR feedback * Centralize shutdown hook * Fix rebase issues * Document test cases * Document test cases * Formatting * Properly close database resources * Rebase cleanup --- airbyte-bootloader/build.gradle | 4 +- .../io/airbyte/bootloader/BootloaderApp.java | 156 ++++++---- .../airbyte/bootloader/BootloaderAppTest.java | 291 +++++++++++------- .../commons/lang/CloseableShutdownHook.java | 63 ++++ .../lang/CloseableShutdownHookTest.java | 29 ++ airbyte-config/persistence/build.gradle | 3 +- .../split_secrets/SecretPersistence.java | 23 +- .../BaseDatabaseConfigPersistenceTest.java | 6 + .../ConfigRepositoryE2EReadWriteTest.java | 18 +- ...baseConfigPersistenceE2EReadWriteTest.java | 20 +- ...DatabaseConfigPersistenceLoadDataTest.java | 22 +- .../DatabaseConfigPersistenceTest.java | 22 +- ...istenceUpdateConnectorDefinitionsTest.java | 21 +- airbyte-container-orchestrator/build.gradle | 6 +- airbyte-db/jooq/build.gradle | 13 +- airbyte-db/lib/build.gradle | 14 +- .../src/main/java/io/airbyte/db/Database.java | 80 +++-- .../main/java/io/airbyte/db/Databases.java | 52 ++-- .../airbyte/db/ExceptionWrappingDatabase.java | 7 +- .../TempBigQueryJoolDatabaseImpl.java | 16 +- .../airbyte/db/factory/DSLContextFactory.java | 39 +++ .../airbyte/db/factory/DataSourceFactory.java | 31 +- .../io/airbyte/db/factory/DatabaseDriver.java | 58 ++++ .../io/airbyte/db/factory/FlywayFactory.java | 38 ++- 
.../db/instance/BaseDatabaseInstance.java | 41 +-- .../db/instance/FlywayDatabaseMigrator.java | 35 --- .../db/instance/FlywayMigrationDatabase.java | 25 +- .../configs/ConfigsDatabaseInstance.java | 18 +- .../ConfigsDatabaseMigrationDevCenter.java | 19 +- .../configs/ConfigsDatabaseMigrator.java | 5 +- .../configs/ConfigsDatabaseTestProvider.java | 19 +- .../ConfigsFlywayMigrationDatabase.java | 25 +- .../V0_30_22_001__Store_last_sync_state.java | 7 +- .../development/MigrationDevCenter.java | 58 +++- .../instance/jobs/JobsDatabaseInstance.java | 9 +- .../jobs/JobsDatabaseMigrationDevCenter.java | 20 +- .../instance/jobs/JobsDatabaseMigrator.java | 5 +- .../jobs/JobsDatabaseTestProvider.java | 19 +- .../jobs/JobsFlywayMigrationDatabase.java | 25 +- .../instance/test/TestDatabaseProviders.java | 60 ++-- .../java/io/airbyte/db/PostgresUtilsTest.java | 22 +- .../db/factory/DSLContextFactoryTest.java | 32 +- .../db/factory/DataSourceFactoryTest.java | 14 +- .../airbyte/db/factory/FlywayFactoryTest.java | 26 +- .../db/instance/AbstractDatabaseTest.java | 37 ++- .../db/instance/BaseDatabaseInstanceTest.java | 20 +- .../configs/AbstractConfigsDatabaseTest.java | 6 +- .../configs/ConfigsDatabaseMigratorTest.java | 6 +- ...30_22_001__Store_last_sync_state_test.java | 22 +- ...yteConfigDatabaseDenormalization_Test.java | 17 +- ...01__AddTombstoneToActorDefinitionTest.java | 5 +- ...eAndReleaseDateToActorDefinition_Test.java | 5 +- ...__RemoveForeignKeyFromActorOauth_Test.java | 4 +- ..._26_001__PersistDiscoveredCatalogTest.java | 5 +- ...1__AddActorCatalogMetadataColumnsTest.java | 5 +- ...35_3_001__DropAirbyteConfigsTableTest.java | 4 +- ...9_001__AddPublicToActorDefinitionTest.java | 5 +- ...ctorDefinitionWorkspaceGrantTableTest.java | 5 +- ...9_003__AddCustomToActorDefinitionTest.java | 5 +- .../jobs/AbstractJobsDatabaseTest.java | 6 +- .../jobs/JobsDatabaseInstanceTest.java | 11 +- .../jobs/JobsDatabaseMigratorTest.java | 6 +- ...1_MigrateFailureReasonEnumValues_Test.java | 4 +- ...dd_failureSummary_col_to_AttemptsTest.java | 4 +- .../instance/toys/ToysDatabaseInstance.java | 5 +- .../instance/toys/ToysDatabaseMigrator.java | 5 +- .../toys/ToysDatabaseMigratorTest.java | 13 +- .../db/jdbc/TestDefaultJdbcDatabase.java | 15 +- .../io/airbyte/db/jdbc/TestJdbcUtils.java | 22 +- .../db/jdbc/TestStreamingJdbcDatabase.java | 20 +- .../bases/base-java/build.gradle | 6 +- .../BufferedStreamConsumerTest.java | 2 +- .../AbstractSourceDatabaseTypeTest.java | 2 - .../AbstractSourceFillDbWithTestData.java | 3 - .../destination-cassandra/build.gradle | 3 +- .../build.gradle | 4 +- ...estinationStrictEncryptAcceptanceTest.java | 11 +- .../destination-clickhouse/build.gradle | 4 +- .../clickhouse/ClickhouseDestination.java | 3 +- .../ClickhouseDestinationAcceptanceTest.java | 23 +- ...shClickhouseDestinationAcceptanceTest.java | 23 +- .../clickhouse/ClickhouseDestinationTest.java | 22 +- .../databricks/DatabricksDestination.java | 11 +- .../DatabricksDestinationAcceptanceTest.java | 11 +- .../destination-elasticsearch/build.gradle | 6 +- .../connectors/destination-gcs/build.gradle | 1 + .../connectors/destination-jdbc/build.gradle | 4 +- .../jdbc/AbstractJdbcDestination.java | 18 +- .../jdbc/SqlOperationsUtilsTest.java | 17 +- .../connectors/destination-kafka/build.gradle | 2 +- .../build.gradle | 2 +- .../MariadbColumnstoreDestination.java | 7 +- ...bColumnstoreDestinationAcceptanceTest.java | 23 +- ...bColumnstoreDestinationAcceptanceTest.java | 35 ++- .../destination-meilisearch/build.gradle | 2 +- 
.../build.gradle | 2 +- .../destination-mongodb/build.gradle | 2 +- .../mongodb/MongodbRecordConsumer.java | 4 +- .../build.gradle | 3 +- ...trictEncryptDestinationAcceptanceTest.java | 18 +- .../connectors/destination-mssql/build.gradle | 3 +- .../destination/mssql/MSSQLDestination.java | 3 +- .../mssql/MSSQLDestinationAcceptanceTest.java | 24 +- .../MSSQLDestinationAcceptanceTestSSL.java | 22 +- .../SshMSSQLDestinationAcceptanceTest.java | 14 +- .../build.gradle | 2 +- ...trictEncryptDestinationAcceptanceTest.java | 26 +- .../connectors/destination-mysql/build.gradle | 2 +- .../destination/mysql/MySQLDestination.java | 3 +- .../mysql/MySQLDestinationAcceptanceTest.java | 27 +- .../SshMySQLDestinationAcceptanceTest.java | 12 +- .../SslMySQLDestinationAcceptanceTest.java | 22 +- .../build.gradle | 2 +- ...trictEncryptDestinationAcceptanceTest.java | 95 +++--- .../destination-oracle/build.gradle | 2 +- .../destination/oracle/OracleDestination.java | 3 +- .../NneOracleDestinationAcceptanceTest.java | 44 ++- .../SshOracleDestinationAcceptanceTest.java | 18 +- ...ryptedOracleDestinationAcceptanceTest.java | 37 +-- .../build.gradle | 2 +- ...estinationStrictEncryptAcceptanceTest.java | 16 +- .../destination-postgres/build.gradle | 4 +- .../postgres/PostgresDestination.java | 3 +- .../PostgresDestinationAcceptanceTest.java | 16 +- .../SshPostgresDestinationAcceptanceTest.java | 21 +- .../postgres/PostgresDestinationTest.java | 2 +- .../destination-pulsar/build.gradle | 2 +- .../connectors/destination-redis/build.gradle | 3 +- .../redshift/RedshiftInsertDestination.java | 14 +- ...RedshiftCopyDestinationAcceptanceTest.java | 25 +- .../connectors/destination-s3/build.gradle | 1 + .../destination-scylla/build.gradle | 3 +- .../source/bigquery/BigQuerySource.java | 3 +- .../build.gradle | 2 +- ...StrictEncryptJdbcSourceAcceptanceTest.java | 20 +- .../connectors/source-clickhouse/build.gradle | 2 +- ...ractSshClickHouseSourceAcceptanceTest.java | 23 +- .../ClickHouseSourceAcceptanceTest.java | 23 +- ...SslClickHouseJdbcSourceAcceptanceTest.java | 20 +- .../build.gradle | 8 +- ...ockroachDbEncryptSourceAcceptanceTest.java | 42 +-- .../source-cockroachdb/build.gradle | 4 +- .../source/cockroachdb/CockroachDbSource.java | 23 +- .../CockroachDbSourceAcceptanceTest.java | 41 +-- .../CockroachDbSourceDatatypeTest.java | 18 +- .../CockroachDbJdbcSourceAcceptanceTest.java | 19 +- .../cockroachdb/CockroachDbSourceTest.java | 107 ++++--- .../source-db2-strict-encrypt/build.gradle | 2 +- .../Db2StrictEncryptSource.java | 3 +- ...ncryptSourceCertificateAcceptanceTest.java | 16 +- .../connectors/source-db2/build.gradle | 2 +- .../Db2Source.java | 7 +- .../sources/Db2SourceAcceptanceTest.java | 41 +-- .../Db2SourceCertificateAcceptanceTest.java | 16 +- .../sources/Db2SourceDatatypeTest.java | 18 +- .../connectors/source-jdbc/build.gradle | 6 +- .../source/jdbc/AbstractJdbcSource.java | 23 +- .../jdbc/JdbcSourceSourceAcceptanceTest.java | 17 +- .../AbstractJdbcSourceAcceptanceTest.java | 7 +- .../source/jdbc/DefaultJdbcStressTest.java | 7 +- .../source/jdbc/JdbcSourceStressTest.java | 7 +- .../jdbc/test/JdbcSourceAcceptanceTest.java | 22 +- .../source/jdbc/test/JdbcStressTest.java | 16 +- .../connectors/source-kafka/build.gradle | 2 +- .../connectors/source-mongodb-v2/build.gradle | 2 +- .../MongoDbSource.java | 7 +- .../source-mssql-strict-encrypt/build.gradle | 2 +- ...ssqlStrictEncryptSourceAcceptanceTest.java | 14 +- ...StrictEncryptJdbcSourceAcceptanceTest.java | 21 +- .../connectors/source-mssql/build.gradle 
| 4 +- .../source/mssql/MssqlSource.java | 3 +- .../AbstractSshMssqlSourceAcceptanceTest.java | 16 +- .../mssql/CdcMssqlSourceAcceptanceTest.java | 16 +- .../mssql/CdcMssqlSourceDatatypeTest.java | 30 +- .../mssql/MssqlRdsSourceAcceptanceTest.java | 16 +- .../mssql/MssqlSourceAcceptanceTest.java | 16 +- .../source/mssql/MssqlSourceDatatypeTest.java | 16 +- .../SslEnabledMssqlSourceAcceptanceTest.java | 12 +- .../mssql/FillMsSqlTestDbScriptTest.java | 11 +- .../source/mssql/CdcMssqlSourceTest.java | 37 ++- .../mssql/MssqlJdbcSourceAcceptanceTest.java | 21 +- .../source/mssql/MssqlSourceTest.java | 21 +- .../source/mssql/MssqlStressTest.java | 21 +- .../source-mysql-strict-encrypt/build.gradle | 2 +- ...ySqlStrictEncryptSourceAcceptanceTest.java | 35 ++- ...StrictEncryptJdbcSourceAcceptanceTest.java | 19 +- .../connectors/source-mysql/build.gradle | 2 +- .../source/mysql/MySqlSource.java | 3 +- .../mysql/CdcMySqlSourceAcceptanceTest.java | 14 +- .../mysql/CdcMySqlSourceDatatypeTest.java | 31 +- .../mysql/MySqlSourceAcceptanceTest.java | 35 ++- .../source/mysql/MySqlSourceDatatypeTest.java | 25 +- .../mysql/MySqlSslSourceAcceptanceTest.java | 35 ++- .../mysql/FillMySqlTestDbScriptTest.java | 25 +- .../MySqlRdsSourcePerformanceSecretTest.java | 26 +- .../mysql/MySqlJdbcSourceAcceptanceTest.java | 17 +- .../MySqlSslJdbcSourceAcceptanceTest.java | 31 +- .../source/mysql/MySqlStressTest.java | 18 +- .../source-oracle-strict-encrypt/build.gradle | 2 +- .../OracleSourceNneAcceptanceTest.java | 50 +-- ...acleStrictEncryptSourceAcceptanceTest.java | 24 +- .../connectors/source-oracle/build.gradle | 2 +- .../source/oracle/OracleSource.java | 3 +- ...AbstractSshOracleSourceAcceptanceTest.java | 22 +- .../oracle/OracleSourceAcceptanceTest.java | 22 +- .../oracle/OracleSourceDatatypeTest.java | 15 +- .../oracle/OracleSourceNneAcceptanceTest.java | 69 +++-- .../source/oracle/OracleSourceTest.java | 22 +- .../build.gradle | 2 +- ...gresSourceStrictEncryptAcceptanceTest.java | 35 ++- .../connectors/source-postgres/build.gradle | 4 +- .../source/postgres/PostgresSource.java | 3 +- ...stractSshPostgresSourceAcceptanceTest.java | 35 ++- .../CdcPostgresSourceAcceptanceTest.java | 46 +-- .../CdcPostgresSourceDatatypeTest.java | 16 +- .../sources/PostgresSourceAcceptanceTest.java | 37 +-- .../sources/PostgresSourceDatatypeTest.java | 16 +- .../FillPostgresTestDbScriptTest.java | 16 +- .../postgres/CdcPostgresSourceTest.java | 49 +-- .../postgres/PostgresSourceSSLTest.java | 57 ++-- .../source/postgres/PostgresSourceTest.java | 86 +++--- .../source/postgres/PostgresStressTest.java | 7 +- .../source/redshift/RedshiftSource.java | 7 +- .../sources/RedshiftSourceAcceptanceTest.java | 23 +- .../RedshiftSslSourceAcceptanceTest.java | 28 +- .../source-relational-db/build.gradle | 4 +- .../SnowflakeSource.java | 3 +- .../SnowflakeSourceAcceptanceTest.java | 24 +- .../sources/SnowflakeSourceDatatypeTest.java | 19 +- .../connectors/source-tidb/build.gradle | 4 +- .../integrations/source/tidb/TiDBSource.java | 7 +- .../source/tidb/TiDBSourceAcceptanceTest.java | 45 +-- airbyte-metrics/lib/build.gradle | 3 +- ...eriesTest.java => MetricsQueriesTest.java} | 21 +- .../airbyte/metrics/reporter/ReporterApp.java | 32 +- airbyte-scheduler/app/build.gradle | 2 +- .../airbyte/scheduler/app/SchedulerApp.java | 131 ++++---- airbyte-scheduler/persistence/build.gradle | 5 +- .../DefaultJobPersistenceTest.java | 20 +- airbyte-server/build.gradle | 9 +- .../server/ConfigurationApiFactory.java | 13 +- 
.../java/io/airbyte/server/ServerApp.java | 72 ++++- .../java/io/airbyte/server/ServerFactory.java | 13 +- .../airbyte/server/apis/ConfigurationApi.java | 9 +- .../server/handlers/DbMigrationHandler.java | 7 +- .../server/apis/ConfigurationApiTest.java | 5 +- .../server/handlers/ArchiveHandlerTest.java | 20 +- airbyte-test-utils/build.gradle | 5 +- .../utils/CockroachDBContainerHelper.java | 36 +-- .../test/utils/DatabaseConnectionHelper.java | 48 +++ .../test/utils/PostgreSQLContainerHelper.java | 36 +-- .../utils/DatabaseConnectionHelperTest.java | 55 ++++ airbyte-tests/build.gradle | 8 +- .../test/acceptance/AcceptanceTests.java | 7 +- .../test/acceptance/GKEPostgresConfig.java | 10 +- airbyte-workers/build.gradle | 8 +- .../java/io/airbyte/workers/WorkerApp.java | 43 ++- .../TemporalAttemptExecutionTest.java | 12 +- deps.toml | 26 ++ 259 files changed, 3196 insertions(+), 1960 deletions(-) create mode 100644 airbyte-commons/src/main/java/io/airbyte/commons/lang/CloseableShutdownHook.java create mode 100644 airbyte-commons/src/test/java/io/airbyte/commons/lang/CloseableShutdownHookTest.java create mode 100644 airbyte-db/lib/src/main/java/io/airbyte/db/factory/DatabaseDriver.java rename airbyte-metrics/lib/src/test/java/io/airbyte/metrics/lib/{MetrisQueriesTest.java => MetricsQueriesTest.java} (97%) create mode 100644 airbyte-test-utils/src/main/java/io/airbyte/test/utils/DatabaseConnectionHelper.java create mode 100644 airbyte-test-utils/src/test/java/io/airbyte/test/utils/DatabaseConnectionHelperTest.java diff --git a/airbyte-bootloader/build.gradle b/airbyte-bootloader/build.gradle index 4fb63889cba5f..77cf5c872fd3f 100644 --- a/airbyte-bootloader/build.gradle +++ b/airbyte-bootloader/build.gradle @@ -14,9 +14,9 @@ dependencies { implementation project(':airbyte-scheduler:persistence') implementation 'io.temporal:temporal-sdk:1.8.1' - implementation "org.flywaydb:flyway-core:7.14.0" + implementation libs.flyway.core - testImplementation "org.testcontainers:postgresql:1.15.3" + testImplementation libs.testcontainers.postgresql testImplementation 'uk.org.webcompere:system-stubs-jupiter:1.2.0' } diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/BootloaderApp.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/BootloaderApp.java index 9a89a9507b83c..089f5c0f9d417 100644 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/BootloaderApp.java +++ b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/BootloaderApp.java @@ -4,9 +4,9 @@ package io.airbyte.bootloader; -import com.google.common.annotations.VisibleForTesting; import io.airbyte.commons.features.EnvVariableFeatureFlags; import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.commons.lang.CloseableShutdownHook; import io.airbyte.commons.resources.MoreResources; import io.airbyte.commons.version.AirbyteVersion; import io.airbyte.config.Configs; @@ -19,6 +19,10 @@ import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; import io.airbyte.config.persistence.split_secrets.SecretPersistence; import io.airbyte.db.Database; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.factory.FlywayFactory; import io.airbyte.db.instance.DatabaseMigrator; import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; @@ -30,6 +34,10 @@ import java.io.IOException; import java.util.Optional; import 
java.util.UUID; +import javax.sql.DataSource; +import org.flywaydb.core.Flyway; +import org.jooq.DSLContext; +import org.jooq.SQLDialect; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -49,20 +57,18 @@ public class BootloaderApp { private static final Logger LOGGER = LoggerFactory.getLogger(BootloaderApp.class); private static final AirbyteVersion VERSION_BREAK = new AirbyteVersion("0.32.0-alpha"); + private static final String DRIVER_CLASS_NAME = DatabaseDriver.POSTGRESQL.getDriverClassName(); private final Configs configs; private final Runnable postLoadExecution; private final FeatureFlags featureFlags; - private SecretMigrator secretMigrator; + private final SecretMigrator secretMigrator; private ConfigPersistence configPersistence; private Database configDatabase; private Database jobDatabase; private JobPersistence jobPersistence; - - @VisibleForTesting - public BootloaderApp(final Configs configs, final FeatureFlags featureFlags) { - this(configs, () -> {}, featureFlags, null); - } + private final Flyway configsFlyway; + private final Flyway jobsFlyway; /** * This method is exposed for Airbyte Cloud consumption. This lets us override the seed loading @@ -71,81 +77,94 @@ public BootloaderApp(final Configs configs, final FeatureFlags featureFlags) { * * @param configs * @param postLoadExecution + * @param featureFlags + * @param secretMigrator + * @param configsDslContext */ public BootloaderApp(final Configs configs, final Runnable postLoadExecution, final FeatureFlags featureFlags, - final SecretMigrator secretMigrator) { + final SecretMigrator secretMigrator, + final DSLContext configsDslContext, + final DSLContext jobsDslContext, + final Flyway configsFlyway, + final Flyway jobsFlyway) { this.configs = configs; this.postLoadExecution = postLoadExecution; this.featureFlags = featureFlags; this.secretMigrator = secretMigrator; + this.configsFlyway = configsFlyway; + this.jobsFlyway = jobsFlyway; - initPersistences(); + initPersistences(configsDslContext, jobsDslContext); } - public BootloaderApp(final Configs configs, final FeatureFlags featureFlags, final SecretMigrator secretMigrator) { + public BootloaderApp(final Configs configs, + final FeatureFlags featureFlags, + final SecretMigrator secretMigrator, + final DSLContext configsDslContext, + final DSLContext jobsDslContext, + final Flyway configsFlyway, + final Flyway jobsFlyway) { this.configs = configs; this.featureFlags = featureFlags; + this.secretMigrator = secretMigrator; + this.configsFlyway = configsFlyway; + this.jobsFlyway = jobsFlyway; - initPersistences(); + initPersistences(configsDslContext, jobsDslContext); postLoadExecution = () -> { try { configPersistence.loadData(YamlSeedConfigPersistence.getDefault()); if (featureFlags.forceSecretMigration() || !jobPersistence.isSecretMigrated()) { - secretMigrator.migrateSecrets(); + if (this.secretMigrator != null) { + this.secretMigrator.migrateSecrets(); + LOGGER.info("Secrets successfully migrated."); + } } LOGGER.info("Loaded seed data.."); } catch (final IOException | JsonValidationException e) { throw new RuntimeException(e); } }; - } public void load() throws Exception { - LOGGER.info("Setting up config database and default workspace.."); + LOGGER.info("Setting up config database and default workspace..."); + final JobPersistence jobPersistence = new DefaultJobPersistence(jobDatabase); + final AirbyteVersion currAirbyteVersion = configs.getAirbyteVersion(); + assertNonBreakingMigration(jobPersistence, currAirbyteVersion); - try { - LOGGER.info("Created 
initial jobs and configs database..."); - - final JobPersistence jobPersistence = new DefaultJobPersistence(jobDatabase); - final AirbyteVersion currAirbyteVersion = configs.getAirbyteVersion(); - assertNonBreakingMigration(jobPersistence, currAirbyteVersion); + // TODO Will be converted to an injected singleton during DI migration + final DatabaseMigrator configDbMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); + final DatabaseMigrator jobDbMigrator = new JobsDatabaseMigrator(jobDatabase, jobsFlyway); - runFlywayMigration(configs, configDatabase, jobDatabase); - LOGGER.info("Ran Flyway migrations..."); + runFlywayMigration(configs, configDbMigrator, jobDbMigrator); + LOGGER.info("Ran Flyway migrations."); - final ConfigRepository configRepository = - new ConfigRepository(configPersistence, configDatabase); + final ConfigRepository configRepository = + new ConfigRepository(configPersistence, configDatabase); - createWorkspaceIfNoneExists(configRepository); - LOGGER.info("Default workspace created.."); + createWorkspaceIfNoneExists(configRepository); + LOGGER.info("Default workspace created."); - createDeploymentIfNoneExists(jobPersistence); - LOGGER.info("Default deployment created.."); + createDeploymentIfNoneExists(jobPersistence); + LOGGER.info("Default deployment created."); - jobPersistence.setVersion(currAirbyteVersion.serialize()); - LOGGER.info("Set version to {}", currAirbyteVersion); + jobPersistence.setVersion(currAirbyteVersion.serialize()); + LOGGER.info("Set version to {}", currAirbyteVersion); - postLoadExecution.run(); - } finally { - jobDatabase.close(); - configDatabase.close(); - } + postLoadExecution.run(); - LOGGER.info("Finished running post load Execution.."); + LOGGER.info("Finished running post load Execution."); - LOGGER.info("Finished bootstrapping Airbyte environment.."); + LOGGER.info("Finished bootstrapping Airbyte environment."); } - private static Database getConfigDatabase(final Configs configs) throws IOException { - return new ConfigsDatabaseInstance( - configs.getConfigDatabaseUser(), - configs.getConfigDatabasePassword(), - configs.getConfigDatabaseUrl()).getAndInitialize(); + private static Database getConfigDatabase(final DSLContext dslContext) throws IOException { + return new ConfigsDatabaseInstance(dslContext).getAndInitialize(); } private static ConfigPersistence getConfigPersistence(final Database configDatabase) throws IOException { @@ -157,36 +176,56 @@ private static ConfigPersistence getConfigPersistence(final Database configDatab return DatabaseConfigPersistence.createWithValidation(configDatabase, jsonSecretsProcessor); } - private static Database getJobDatabase(final Configs configs) throws IOException { - return new JobsDatabaseInstance(configs.getDatabaseUser(), configs.getDatabasePassword(), configs.getDatabaseUrl()).getAndInitialize(); + private static Database getJobDatabase(final DSLContext dslContext) throws IOException { + return new JobsDatabaseInstance(dslContext).getAndInitialize(); } private static JobPersistence getJobPersistence(final Database jobDatabase) throws IOException { return new DefaultJobPersistence(jobDatabase); } - private void initPersistences() { + private void initPersistences(final DSLContext configsDslContext, final DSLContext jobsDslContext) { try { - configDatabase = getConfigDatabase(configs); + configDatabase = getConfigDatabase(configsDslContext); configPersistence = getConfigPersistence(configDatabase); - jobDatabase = getJobDatabase(configs); + jobDatabase = 
getJobDatabase(jobsDslContext); jobPersistence = getJobPersistence(jobDatabase); } catch (final IOException e) { - e.printStackTrace(); + LOGGER.error("Unable to initialize persistence.", e); } } public static void main(final String[] args) throws Exception { final Configs configs = new EnvConfigs(); final FeatureFlags featureFlags = new EnvVariableFeatureFlags(); - final Database configDatabase = getConfigDatabase(configs); - final ConfigPersistence configPersistence = getConfigPersistence(configDatabase); - final Database jobDatabase = getJobDatabase(configs); - final JobPersistence jobPersistence = getJobPersistence(jobDatabase); - final SecretMigrator secretMigrator = new SecretMigrator(configPersistence, jobPersistence, SecretPersistence.getLongLived(configs)); - final Optional<SecretPersistence> secretPersistence = SecretPersistence.getLongLived(configs); - final var bootloader = new BootloaderApp(configs, featureFlags, secretMigrator); - bootloader.load(); + + // Manual configuration that will be replaced by Dependency Injection in the future + final DataSource configsDataSource = DataSourceFactory.create(configs.getConfigDatabaseUser(), configs.getConfigDatabasePassword(), + DRIVER_CLASS_NAME, configs.getConfigDatabaseUrl()); + final DataSource jobsDataSource = + DataSourceFactory.create(configs.getDatabaseUser(), configs.getDatabasePassword(), DRIVER_CLASS_NAME, configs.getDatabaseUrl()); + + try (final DSLContext configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); + final DSLContext jobsDslContext = DSLContextFactory.create(jobsDataSource, SQLDialect.POSTGRES)) { + + // TODO Will be converted to an injected singleton during DI migration + final Database configDatabase = getConfigDatabase(configsDslContext); + final ConfigPersistence configPersistence = getConfigPersistence(configDatabase); + final Database jobDatabase = getJobDatabase(jobsDslContext); + final JobPersistence jobPersistence = getJobPersistence(jobDatabase); + final SecretMigrator secretMigrator = + new SecretMigrator(configPersistence, jobPersistence, SecretPersistence.getLongLived(configsDslContext, configs)); + final Flyway configsFlyway = FlywayFactory.create(configsDataSource, BootloaderApp.class.getSimpleName(), ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); + final Flyway jobsFlyway = FlywayFactory.create(jobsDataSource, BootloaderApp.class.getSimpleName(), JobsDatabaseMigrator.DB_IDENTIFIER, + JobsDatabaseMigrator.MIGRATION_FILE_LOCATION); + + // Ensure that the database resources are closed on application shutdown + CloseableShutdownHook.registerRuntimeShutdownHook(configsDataSource, jobsDataSource, configsDslContext, jobsDslContext); + + final var bootloader = new BootloaderApp(configs, featureFlags, secretMigrator, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway); + bootloader.load(); + } } private static void createDeploymentIfNoneExists(final JobPersistence jobPersistence) throws IOException { @@ -253,10 +292,7 @@ static boolean isLegalUpgrade(final AirbyteVersion airbyteDatabaseVersion, final return !isUpgradingThroughVersionBreak; } - private static void runFlywayMigration(final Configs configs, final Database configDatabase, final Database jobDatabase) { - final DatabaseMigrator configDbMigrator = new ConfigsDatabaseMigrator(configDatabase, BootloaderApp.class.getSimpleName()); - final DatabaseMigrator jobDbMigrator = new JobsDatabaseMigrator(jobDatabase, BootloaderApp.class.getSimpleName()); - + private static void runFlywayMigration(final Configs configs, final DatabaseMigrator configDbMigrator, final DatabaseMigrator jobDbMigrator) { configDbMigrator.createBaseline(); jobDbMigrator.createBaseline(); diff --git a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java index 439cd790a68a0..7436ac1ceb75f 100644 --- a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java +++ b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java @@ -29,16 +29,25 @@ import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; import io.airbyte.config.persistence.split_secrets.SecretPersistence; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.FlywayFactory; import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; import io.airbyte.db.instance.jobs.JobsDatabaseInstance; import io.airbyte.db.instance.jobs.JobsDatabaseMigrator; import io.airbyte.scheduler.persistence.DefaultJobPersistence; +import java.io.Closeable; +import java.io.IOException; import java.util.Optional; import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; +import javax.sql.DataSource; import lombok.val; +import org.flywaydb.core.Flyway; +import org.jooq.SQLDialect; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.testcontainers.containers.PostgreSQLContainer; @@ -49,16 +58,36 @@ @ExtendWith(SystemStubsExtension.class) public class BootloaderAppTest { - @SystemStub - private EnvironmentVariables environmentVariables; + private PostgreSQLContainer<?> container; + private DataSource configsDataSource; + private DataSource jobsDataSource; - @Test - void testBootloaderAppBlankDb() throws Exception { - val container = new PostgreSQLContainer<>("postgres:13-alpine") + @BeforeEach + void setup() { + container = new PostgreSQLContainer<>("postgres:13-alpine") .withDatabaseName("public") .withUsername("docker") .withPassword("docker"); container.start(); + + configsDataSource = + DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); + jobsDataSource = + DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); + } + + @AfterEach + void cleanup() throws IOException { + closeDataSource(configsDataSource); + closeDataSource(jobsDataSource); + container.stop(); + } + + @SystemStub + private EnvironmentVariables environmentVariables; + + @Test + void testBootloaderAppBlankDb() throws Exception { val version = "0.33.0-alpha"; val mockedConfigs = mock(Configs.class); @@ -83,37 +112,33 @@ void testBootloaderAppBlankDb() throws Exception { environmentVariables.set("DATABASE_PASSWORD", "docker"); environmentVariables.set("DATABASE_URL", container.getJdbcUrl()); - val bootloader = new BootloaderApp(mockedConfigs, mockedFeatureFlags); - bootloader.load(); + try (val configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); + val jobsDslContext = DSLContextFactory.create(jobsDataSource, SQLDialect.POSTGRES)) { + + val configsFlyway = createConfigsFlyway(configsDataSource); + val
jobsFlyway = createJobsFlyway(jobsDataSource); + + val bootloader = + new BootloaderApp(mockedConfigs, mockedFeatureFlags, mockedSecretMigrator, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway); + bootloader.load(); - val jobDatabase = new JobsDatabaseInstance( - container.getUsername(), - container.getPassword(), - container.getJdbcUrl()).getInitialized(); - val jobsMigrator = new JobsDatabaseMigrator(jobDatabase, this.getClass().getName()); - assertEquals("0.35.62.001", jobsMigrator.getLatestMigration().getVersion().getVersion()); + val jobDatabase = new JobsDatabaseInstance(jobsDslContext).getInitialized(); + val jobsMigrator = new JobsDatabaseMigrator(jobDatabase, jobsFlyway); + assertEquals("0.35.62.001", jobsMigrator.getLatestMigration().getVersion().getVersion()); - val configDatabase = new ConfigsDatabaseInstance( - mockedConfigs.getConfigDatabaseUser(), - mockedConfigs.getConfigDatabasePassword(), - mockedConfigs.getConfigDatabaseUrl()) - .getAndInitialize(); - val configsMigrator = new ConfigsDatabaseMigrator(configDatabase, this.getClass().getName()); - assertEquals("0.35.65.001", configsMigrator.getLatestMigration().getVersion().getVersion()); + val configDatabase = new ConfigsDatabaseInstance(configsDslContext).getAndInitialize(); + val configsMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); + assertEquals("0.35.65.001", configsMigrator.getLatestMigration().getVersion().getVersion()); - val jobsPersistence = new DefaultJobPersistence(jobDatabase); - assertEquals(version, jobsPersistence.getVersion().get()); + val jobsPersistence = new DefaultJobPersistence(jobDatabase); + assertEquals(version, jobsPersistence.getVersion().get()); - assertNotEquals(Optional.empty(), jobsPersistence.getDeployment().get()); + assertNotEquals(Optional.empty(), jobsPersistence.getDeployment().get()); + } } @Test void testBootloaderAppRunSecretMigration() throws Exception { - val container = new PostgreSQLContainer<>("postgres:13-alpine") - .withDatabaseName("public") - .withUsername("docker") - .withPassword("docker"); - container.start(); val version = "0.33.0-alpha"; val mockedConfigs = mock(Configs.class); @@ -134,85 +159,102 @@ void testBootloaderAppRunSecretMigration() throws Exception { .copySecrets(true) .maskSecrets(true) .build(); - final Database configDatabase = Databases.createPostgresDatabase(container.getUsername(), container.getPassword(), container.getJdbcUrl()); - final ConfigPersistence configPersistence = new DatabaseConfigPersistence(configDatabase, jsonSecretsProcessor); - - val jobsPersistence = new DefaultJobPersistence(configDatabase); - val spiedSecretMigrator = spy(new SecretMigrator(configPersistence, jobsPersistence, SecretPersistence.getLongLived(mockedConfigs))); - - // Although we are able to inject mocked configs into the Bootloader, a particular migration in the - // configs database - // requires the env var to be set. Flyway prevents injection, so we dynamically set this instead. 
- environmentVariables.set("DATABASE_USER", "docker"); - environmentVariables.set("DATABASE_PASSWORD", "docker"); - environmentVariables.set("DATABASE_URL", container.getJdbcUrl()); - - val initBootloader = new BootloaderApp(mockedConfigs, mockedFeatureFlags); - initBootloader.load(); - - final ConfigPersistence localSchema = YamlSeedConfigPersistence.getDefault(); - final ConfigRepository configRepository = new ConfigRepository(configPersistence, configDatabase); - configRepository.loadDataNoSecrets(localSchema); - - final String sourceSpecs = """ - { - "account_id": "1234567891234567", - "start_date": "2022-04-01T00:00:00Z", - "access_token": "nonhiddensecret", - "include_deleted": false, - "fetch_thumbnail_images": false - } - - """; - - final ObjectMapper mapper = new ObjectMapper(); - - final UUID workspaceId = UUID.randomUUID(); - configRepository.writeStandardWorkspace(new StandardWorkspace() - .withWorkspaceId(workspaceId) - .withName("wName") - .withSlug("wSlug") - .withEmail("email@mail.com") - .withTombstone(false) - .withInitialSetupComplete(false)); - final UUID sourceId = UUID.randomUUID(); - configRepository.writeSourceConnectionNoSecrets(new SourceConnection() - .withSourceDefinitionId(UUID.fromString("e7778cfc-e97c-4458-9ecb-b4f2bba8946c")) // Facebook Marketing - .withSourceId(sourceId) - .withName("test source") - .withWorkspaceId(workspaceId) - .withConfiguration(mapper.readTree(sourceSpecs))); - - when(mockedFeatureFlags.forceSecretMigration()).thenReturn(false); - var bootloader = new BootloaderApp(mockedConfigs, mockedFeatureFlags, spiedSecretMigrator); - boolean isMigrated = jobsPersistence.isSecretMigrated(); - - assertFalse(isMigrated); - - bootloader.load(); - verify(spiedSecretMigrator).migrateSecrets(); - - final SourceConnection sourceConnection = configRepository.getSourceConnection(sourceId); - - assertFalse(sourceConnection.getConfiguration().toString().contains("nonhiddensecret")); - assertTrue(sourceConnection.getConfiguration().toString().contains("_secret")); - - isMigrated = jobsPersistence.isSecretMigrated(); - assertTrue(isMigrated); - - reset(spiedSecretMigrator); - // We need to re-create the bootloader because it is closing the persistence after running load - bootloader = new BootloaderApp(mockedConfigs, mockedFeatureFlags, spiedSecretMigrator); - bootloader.load(); - verifyNoInteractions(spiedSecretMigrator); - - reset(spiedSecretMigrator); - when(mockedFeatureFlags.forceSecretMigration()).thenReturn(true); - // We need to re-create the bootloader because it is closing the persistence after running load - bootloader = new BootloaderApp(mockedConfigs, mockedFeatureFlags, spiedSecretMigrator); - bootloader.load(); - verify(spiedSecretMigrator).migrateSecrets(); + try (val configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); + val jobsDslContext = DSLContextFactory.create(jobsDataSource, SQLDialect.POSTGRES)) { + + val configsFlyway = createConfigsFlyway(configsDataSource); + val jobsFlyway = createJobsFlyway(jobsDataSource); + + final Database configDatabase = new Database(configsDslContext); + final ConfigPersistence configPersistence = new DatabaseConfigPersistence(configDatabase, jsonSecretsProcessor); + + val jobsPersistence = new DefaultJobPersistence(configDatabase); + + val spiedSecretMigrator = + spy(new SecretMigrator(configPersistence, jobsPersistence, SecretPersistence.getLongLived(configsDslContext, mockedConfigs))); + + // Although we are able to inject mocked configs into the Bootloader, a
particular migration in the + // configs database requires the env var to be set. Flyway prevents injection, so we dynamically set + // this instead. + environmentVariables.set("DATABASE_USER", "docker"); + environmentVariables.set("DATABASE_PASSWORD", "docker"); + environmentVariables.set("DATABASE_URL", container.getJdbcUrl()); + + // Bootstrap the database for the test + val initBootloader = new BootloaderApp(mockedConfigs, mockedFeatureFlags, null, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway); + initBootloader.load(); + + final ConfigPersistence localSchema = YamlSeedConfigPersistence.getDefault(); + final ConfigRepository configRepository = new ConfigRepository(configPersistence, configDatabase); + configRepository.loadDataNoSecrets(localSchema); + + final String sourceSpecs = """ + { + "account_id": "1234567891234567", + "start_date": "2022-04-01T00:00:00Z", + "access_token": "nonhiddensecret", + "include_deleted": false, + "fetch_thumbnail_images": false + } + + """; + + final ObjectMapper mapper = new ObjectMapper(); + + final UUID workspaceId = UUID.randomUUID(); + configRepository.writeStandardWorkspace(new StandardWorkspace() + .withWorkspaceId(workspaceId) + .withName("wName") + .withSlug("wSlug") + .withEmail("email@mail.com") + .withTombstone(false) + .withInitialSetupComplete(false)); + final UUID sourceId = UUID.randomUUID(); + configRepository.writeSourceConnectionNoSecrets(new SourceConnection() + .withSourceDefinitionId(UUID.fromString("e7778cfc-e97c-4458-9ecb-b4f2bba8946c")) // Facebook Marketing + .withSourceId(sourceId) + .withName("test source") + .withWorkspaceId(workspaceId) + .withConfiguration(mapper.readTree(sourceSpecs))); + + when(mockedFeatureFlags.forceSecretMigration()).thenReturn(false); + + // Perform secrets migration + var bootloader = + new BootloaderApp(mockedConfigs, mockedFeatureFlags, spiedSecretMigrator, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway); + boolean isMigrated = jobsPersistence.isSecretMigrated(); + + assertFalse(isMigrated); + + bootloader.load(); + verify(spiedSecretMigrator).migrateSecrets(); + + final SourceConnection sourceConnection = configRepository.getSourceConnection(sourceId); + + assertFalse(sourceConnection.getConfiguration().toString().contains("nonhiddensecret")); + assertTrue(sourceConnection.getConfiguration().toString().contains("_secret")); + + isMigrated = jobsPersistence.isSecretMigrated(); + assertTrue(isMigrated); + + // Verify that the migration does not happen if it has already been performed + reset(spiedSecretMigrator); + // We need to re-create the bootloader because it is closing the persistence after running load + bootloader = + new BootloaderApp(mockedConfigs, mockedFeatureFlags, spiedSecretMigrator, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway); + bootloader.load(); + verifyNoInteractions(spiedSecretMigrator); + + // Verify that the migration occurs if the force migration feature flag is enabled + reset(spiedSecretMigrator); + when(mockedFeatureFlags.forceSecretMigration()).thenReturn(true); + // We need to re-create the bootloader because it is closing the persistence after running load + bootloader = + new BootloaderApp(mockedConfigs, mockedFeatureFlags, spiedSecretMigrator, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway); + bootloader.load(); + verify(spiedSecretMigrator).migrateSecrets(); + } } @Test @@ -240,12 +282,6 @@ void testIsLegalUpgradePredicate() { @Test void testPostLoadExecutionExecutes() throws Exception { final var 
testTriggered = new AtomicBoolean(); - - val container = new PostgreSQLContainer<>("postgres:13-alpine") - .withDatabaseName("public") - .withUsername("docker") - .withPassword("docker"); - container.start(); val version = "0.33.0-alpha"; val mockedConfigs = mock(Configs.class); @@ -263,9 +299,34 @@ void testPostLoadExecutionExecutes() throws Exception { val mockedSecretMigrator = mock(SecretMigrator.class); - new BootloaderApp(mockedConfigs, () -> testTriggered.set(true), mockedFeatureFlags, mockedSecretMigrator).load(); + try (val configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); + val jobsDslContext = DSLContextFactory.create(jobsDataSource, SQLDialect.POSTGRES)) { + + val configsFlyway = createConfigsFlyway(configsDataSource); + val jobsFlyway = createJobsFlyway(jobsDataSource); + + new BootloaderApp(mockedConfigs, () -> testTriggered.set(true), mockedFeatureFlags, mockedSecretMigrator, configsDslContext, jobsDslContext, + configsFlyway, jobsFlyway) + .load(); + + assertTrue(testTriggered.get()); + } + } + + private Flyway createConfigsFlyway(final DataSource dataSource) { + return FlywayFactory.create(dataSource, getClass().getName(), ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); + } + + private Flyway createJobsFlyway(final DataSource dataSource) { + return FlywayFactory.create(dataSource, getClass().getName(), JobsDatabaseMigrator.DB_IDENTIFIER, + JobsDatabaseMigrator.MIGRATION_FILE_LOCATION); + } - assertTrue(testTriggered.get()); + private void closeDataSource(final DataSource dataSource) throws IOException { + if (dataSource instanceof Closeable closeable) { + closeable.close(); + } } } diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/lang/CloseableShutdownHook.java b/airbyte-commons/src/main/java/io/airbyte/commons/lang/CloseableShutdownHook.java new file mode 100644 index 0000000000000..2ac113181c5d3 --- /dev/null +++ b/airbyte-commons/src/main/java/io/airbyte/commons/lang/CloseableShutdownHook.java @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.lang; + +import com.google.common.annotations.VisibleForTesting; +import java.io.Closeable; +import java.util.Collection; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Registers a shutdown hook that calls the close method of the provided objects. If an object does + * not support either the {@link AutoCloseable} or {@link Closeable} interface, it will be ignored. + * + * This is a temporary class that is being provided to ensure that resources created by each + * application are properly closed on shutdown. This logic will no longer be necessary once an + * application framework is introduced to the project that can provide object lifecycle management. + */ +public class CloseableShutdownHook { + + private static final Logger LOGGER = LoggerFactory.getLogger(CloseableShutdownHook.class); + + /** + * Registers a runtime shutdown hook with the application for each provided closeable object. + * + * @param objects An array of objects to be closed on application shutdown. + */ + public static void registerRuntimeShutdownHook(final Object... objects) { + Runtime.getRuntime().addShutdownHook(buildShutdownHookThread(objects)); + } + + /** + * Builds the {@link Thread} that will be registered as an application shutdown hook.
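+ * Only arguments that implement {@link AutoCloseable} (which includes {@link Closeable}) are retained; null values and + * any other objects are silently dropped. When the returned thread runs, each retained object is closed in turn, and an + * exception thrown by one close is logged and swallowed so that a single failure cannot prevent the remaining resources + * from being released.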
+ * + * @param objects An array of objects to be closed on application shutdown. + * @return The application shutdown hook {@link Thread}. + */ + @VisibleForTesting + static Thread buildShutdownHookThread(final Object... objects) { + final Collection<AutoCloseable> autoCloseables = Stream.of(objects) + .filter(o -> o != null) + .filter(o -> o instanceof AutoCloseable) + .map(o -> AutoCloseable.class.cast(o)) + .collect(Collectors.toList()); + + return new Thread(() -> { + autoCloseables.forEach(CloseableShutdownHook::close); + }); + } + + private static void close(final AutoCloseable autoCloseable) { + try { + autoCloseable.close(); + } catch (final Exception e) { + LOGGER.error("Unable to close object {}.", autoCloseable.getClass().getName(), e); + } + } + +} diff --git a/airbyte-commons/src/test/java/io/airbyte/commons/lang/CloseableShutdownHookTest.java b/airbyte-commons/src/test/java/io/airbyte/commons/lang/CloseableShutdownHookTest.java new file mode 100644 index 0000000000000..86e24bfab4357 --- /dev/null +++ b/airbyte-commons/src/test/java/io/airbyte/commons/lang/CloseableShutdownHookTest.java @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.lang; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +import java.io.InputStream; +import org.junit.jupiter.api.Test; + +public class CloseableShutdownHookTest { + + @Test + void testRegisteringShutdownHook() throws Exception { + final InputStream closeable = mock(InputStream.class); + final CloseableQueue<?> autoCloseable = mock(CloseableQueue.class); + final String notCloseable = "Not closeable"; + + final Thread thread = CloseableShutdownHook.buildShutdownHookThread(closeable, autoCloseable, notCloseable, null); + thread.run(); + + verify(closeable, times(1)).close(); + verify(autoCloseable, times(1)).close(); + } + +} diff --git a/airbyte-config/persistence/build.gradle b/airbyte-config/persistence/build.gradle index 8fe422a4fa679..6c415f1e6af9e 100644 --- a/airbyte-config/persistence/build.gradle +++ b/airbyte-config/persistence/build.gradle @@ -16,7 +16,8 @@ dependencies { implementation 'com.google.cloud:google-cloud-secretmanager:2.0.5' testImplementation 'org.hamcrest:hamcrest-all:1.3' - testImplementation "org.testcontainers:postgresql:1.15.3" + testImplementation libs.testcontainers.postgresql + testImplementation libs.flyway.core testImplementation project(':airbyte-test-utils') integrationTestJavaImplementation project(':airbyte-config:persistence') } diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretPersistence.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretPersistence.java index f90ec25b92da1..c76ac26b5634d 100644 --- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretPersistence.java +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretPersistence.java @@ -9,6 +9,7 @@ import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; import java.io.IOException; import java.util.Optional; +import org.jooq.DSLContext; /** * Provides the ability to read and write secrets to a backing store. Assumes that secret payloads @@ -21,15 +22,10 @@ public interface SecretPersistence extends ReadOnlySecretPersistence { void write(final SecretCoordinate coordinate, final String payload) throws IllegalArgumentException; - static Optional<SecretPersistence> getLongLived(final Configs configs) throws IOException { + static Optional<SecretPersistence> getLongLived(final DSLContext dslContext, final Configs configs) throws IOException { switch (configs.getSecretPersistenceType()) { case TESTING_CONFIG_DB_TABLE -> { - final Database configDatabase = new ConfigsDatabaseInstance( - configs.getConfigDatabaseUser(), - configs.getConfigDatabasePassword(), - configs.getConfigDatabaseUrl()) - .getAndInitialize(); - + final Database configDatabase = new ConfigsDatabaseInstance(dslContext).getAndInitialize(); return Optional.of(new LocalTestingSecretPersistence(configDatabase)); } case GOOGLE_SECRET_MANAGER -> { @@ -41,8 +37,8 @@ static Optional<SecretPersistence> getLongLived(final Configs configs) throws IO } } - static SecretsHydrator getSecretsHydrator(final Configs configs) throws IOException { - final var persistence = getLongLived(configs); + static SecretsHydrator getSecretsHydrator(final DSLContext dslContext, final Configs configs) throws IOException { + final var persistence = getLongLived(dslContext, configs); if (persistence.isPresent()) { return new RealSecretsHydrator(persistence.get()); @@ -51,15 +47,10 @@ static SecretsHydrator getSecretsHydrator(final Configs configs) throws IOExcept } } - static Optional<SecretPersistence> getEphemeral(final Configs configs) throws IOException { + static Optional<SecretPersistence> getEphemeral(final DSLContext dslContext, final Configs configs) throws IOException { switch (configs.getSecretPersistenceType()) { case TESTING_CONFIG_DB_TABLE -> { - final Database configDatabase = new ConfigsDatabaseInstance( - configs.getConfigDatabaseUser(), - configs.getConfigDatabasePassword(), - configs.getConfigDatabaseUrl()) - .getAndInitialize(); - + final Database configDatabase = new ConfigsDatabaseInstance(dslContext).getAndInitialize(); return Optional.of(new LocalTestingSecretPersistence(configDatabase)); } case GOOGLE_SECRET_MANAGER -> { diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/BaseDatabaseConfigPersistenceTest.java b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/BaseDatabaseConfigPersistenceTest.java index a2037db015492..a5e00f96d05ba 100644 --- a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/BaseDatabaseConfigPersistenceTest.java +++ b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/BaseDatabaseConfigPersistenceTest.java @@ -29,6 +29,9 @@ import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.sql.DataSource; +import org.flywaydb.core.Flyway; +import org.jooq.DSLContext; import org.jooq.Record1; import org.jooq.Result; import org.jooq.Table; @@ -45,6 +48,9 @@ public abstract class BaseDatabaseConfigPersistenceTest { protected static Database database; protected static DatabaseConfigPersistence configPersistence; protected static JsonSecretsProcessor jsonSecretsProcessor; + protected static DataSource dataSource; + protected static DSLContext dslContext; + protected static Flyway flyway; @BeforeAll public static void dbSetup() { diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java index
29d1644e1d816..e1c40b9cbe3aa 100644 --- a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java +++ b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java @@ -28,6 +28,8 @@ import io.airbyte.config.StandardWorkspace; import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; import io.airbyte.db.Database; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.FlywayFactory; import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; import io.airbyte.db.instance.development.DevDatabaseMigrator; @@ -36,6 +38,7 @@ import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; +import io.airbyte.test.utils.DatabaseConnectionHelper; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.sql.SQLException; @@ -47,6 +50,10 @@ import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; +import javax.sql.DataSource; +import org.flywaydb.core.Flyway; +import org.jooq.DSLContext; +import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; @@ -56,10 +63,13 @@ public class ConfigRepositoryE2EReadWriteTest { private static PostgreSQLContainer container; + private DataSource dataSource; + private DSLContext dslContext; private Database database; private ConfigRepository configRepository; private DatabaseConfigPersistence configPersistence; private JsonSecretsProcessor jsonSecretsProcessor; + private Flyway flyway; @BeforeAll public static void dbSetup() { @@ -72,12 +82,16 @@ public static void dbSetup() { @BeforeEach void setup() throws IOException, JsonValidationException, SQLException { - database = new ConfigsDatabaseInstance(container.getUsername(), container.getPassword(), container.getJdbcUrl()).getAndInitialize(); + dataSource = DatabaseConnectionHelper.createDataSource(container); + dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); + flyway = FlywayFactory.create(dataSource, DatabaseConfigPersistenceLoadDataTest.class.getName(), ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); + database = new ConfigsDatabaseInstance(dslContext).getAndInitialize(); jsonSecretsProcessor = mock(JsonSecretsProcessor.class); configPersistence = spy(new DatabaseConfigPersistence(database, jsonSecretsProcessor)); configRepository = spy(new ConfigRepository(configPersistence, database)); final ConfigsDatabaseMigrator configsDatabaseMigrator = - new ConfigsDatabaseMigrator(database, DatabaseConfigPersistenceLoadDataTest.class.getName()); + new ConfigsDatabaseMigrator(database, flyway); final DevDatabaseMigrator devDatabaseMigrator = new DevDatabaseMigrator(configsDatabaseMigrator); MigrationDevHelper.runLastMigration(devDatabaseMigrator); for (final StandardWorkspace workspace : MockData.standardWorkspaces()) { diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceE2EReadWriteTest.java b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceE2EReadWriteTest.java index f05fbb986ff67..ad215afe6a4b8 100644 --- a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceE2EReadWriteTest.java 
+++ b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceE2EReadWriteTest.java @@ -24,13 +24,18 @@ import io.airbyte.config.StandardSyncState; import io.airbyte.config.StandardWorkspace; import io.airbyte.config.WorkspaceServiceAccount; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.FlywayFactory; import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; import io.airbyte.db.instance.development.DevDatabaseMigrator; import io.airbyte.db.instance.development.MigrationDevHelper; +import io.airbyte.test.utils.DatabaseConnectionHelper; import io.airbyte.validation.json.JsonValidationException; +import java.io.Closeable; import java.io.IOException; import java.util.List; +import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -39,19 +44,26 @@ public class DatabaseConfigPersistenceE2EReadWriteTest extends BaseDatabaseConfi @BeforeEach public void setup() throws Exception { - database = new ConfigsDatabaseInstance(container.getUsername(), container.getPassword(), container.getJdbcUrl()).getAndInitialize(); + dataSource = DatabaseConnectionHelper.createDataSource(container); + dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); + database = new ConfigsDatabaseInstance(dslContext).getAndInitialize(); + flyway = FlywayFactory.create(dataSource, DatabaseConfigPersistenceLoadDataTest.class.getName(), ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); configPersistence = spy(new DatabaseConfigPersistence(database, jsonSecretsProcessor)); final ConfigsDatabaseMigrator configsDatabaseMigrator = - new ConfigsDatabaseMigrator(database, DatabaseConfigPersistenceLoadDataTest.class.getName()); + new ConfigsDatabaseMigrator(database, flyway); final DevDatabaseMigrator devDatabaseMigrator = new DevDatabaseMigrator(configsDatabaseMigrator); MigrationDevHelper.runLastMigration(devDatabaseMigrator); truncateAllTables(); } @AfterEach - void tearDown() throws Exception { - database.close(); + void tearDown() throws IOException { + dslContext.close(); + if (dataSource instanceof Closeable closeable) { + closeable.close(); + } } @Test diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceLoadDataTest.java b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceLoadDataTest.java index 5033c50b706a1..1665be0f7d968 100644 --- a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceLoadDataTest.java +++ b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceLoadDataTest.java @@ -21,13 +21,19 @@ import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.StandardWorkspace; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.FlywayFactory; import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; import io.airbyte.db.instance.development.DevDatabaseMigrator; import io.airbyte.db.instance.development.MigrationDevHelper; +import io.airbyte.test.utils.DatabaseConnectionHelper; +import java.io.Closeable; +import java.io.IOException; import java.util.Collections; import java.util.UUID; import 
org.jooq.DSLContext; +import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; @@ -47,18 +53,26 @@ public class DatabaseConfigPersistenceLoadDataTest extends BaseDatabaseConfigPer @BeforeAll public static void setup() throws Exception { - database = new ConfigsDatabaseInstance(container.getUsername(), container.getPassword(), container.getJdbcUrl()).getAndInitialize(); + dataSource = DatabaseConnectionHelper.createDataSource(container); + dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); + database = new ConfigsDatabaseInstance(dslContext).getAndInitialize(); + flyway = FlywayFactory.create(dataSource, DatabaseConfigPersistenceLoadDataTest.class.getName(), ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); configPersistence = spy(new DatabaseConfigPersistence(database, jsonSecretsProcessor)); final ConfigsDatabaseMigrator configsDatabaseMigrator = - new ConfigsDatabaseMigrator(database, DatabaseConfigPersistenceLoadDataTest.class.getName()); + new ConfigsDatabaseMigrator(database, flyway); final DevDatabaseMigrator devDatabaseMigrator = new DevDatabaseMigrator(configsDatabaseMigrator); MigrationDevHelper.runLastMigration(devDatabaseMigrator); truncateAllTables(); } @AfterAll - public static void tearDown() throws Exception { - database.close(); + public static void tearDown() throws IOException { + dslContext.close(); + if (dataSource instanceof Closeable closeable) { + closeable.close(); + } } @BeforeEach diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceTest.java b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceTest.java index 2bf79d927c8d9..a67fd90980929 100644 --- a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceTest.java +++ b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceTest.java @@ -30,11 +30,16 @@ import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.StandardSourceDefinition.ReleaseStage; import io.airbyte.config.persistence.DatabaseConfigPersistence.ConnectorInfo; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.FlywayFactory; import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; import io.airbyte.db.instance.development.DevDatabaseMigrator; import io.airbyte.db.instance.development.MigrationDevHelper; import io.airbyte.protocol.models.ConnectorSpecification; +import io.airbyte.test.utils.DatabaseConnectionHelper; +import java.io.Closeable; +import java.io.IOException; import java.time.Duration; import java.time.Instant; import java.time.LocalDate; @@ -47,6 +52,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import org.assertj.core.api.Assertions; +import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -59,18 +65,26 @@ public class DatabaseConfigPersistenceTest extends BaseDatabaseConfigPersistence @BeforeEach public void setup() throws Exception { - database = new ConfigsDatabaseInstance(container.getUsername(), container.getPassword(), container.getJdbcUrl()).getAndInitialize(); + dataSource =
DatabaseConnectionHelper.createDataSource(container); + dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); + database = new ConfigsDatabaseInstance(dslContext).getAndInitialize(); + flyway = FlywayFactory.create(dataSource, DatabaseConfigPersistenceLoadDataTest.class.getName(), ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); configPersistence = spy(new DatabaseConfigPersistence(database, jsonSecretsProcessor)); final ConfigsDatabaseMigrator configsDatabaseMigrator = - new ConfigsDatabaseMigrator(database, DatabaseConfigPersistenceLoadDataTest.class.getName()); + new ConfigsDatabaseMigrator(database, flyway); final DevDatabaseMigrator devDatabaseMigrator = new DevDatabaseMigrator(configsDatabaseMigrator); MigrationDevHelper.runLastMigration(devDatabaseMigrator); truncateAllTables(); } @AfterEach - void tearDown() throws Exception { - database.close(); + void tearDown() throws IOException { + dslContext.close(); + if (dataSource instanceof Closeable closeable) { + closeable.close(); + } } @Test diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceUpdateConnectorDefinitionsTest.java b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceUpdateConnectorDefinitionsTest.java index 6b6261aed509d..a92260af61843 100644 --- a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceUpdateConnectorDefinitionsTest.java +++ b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceUpdateConnectorDefinitionsTest.java @@ -12,10 +12,14 @@ import io.airbyte.config.ConfigSchema; import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.persistence.DatabaseConfigPersistence.ConnectorInfo; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.FlywayFactory; import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; import io.airbyte.db.instance.development.DevDatabaseMigrator; import io.airbyte.db.instance.development.MigrationDevHelper; +import io.airbyte.test.utils.DatabaseConnectionHelper; +import java.io.Closeable; import java.io.IOException; import java.sql.SQLException; import java.util.Collections; @@ -23,6 +27,7 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; +import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; @@ -38,17 +43,25 @@ public class DatabaseConfigPersistenceUpdateConnectorDefinitionsTest extends Bas @BeforeAll public static void setup() throws Exception { - database = new ConfigsDatabaseInstance(container.getUsername(), container.getPassword(), container.getJdbcUrl()).getAndInitialize(); + dataSource = DatabaseConnectionHelper.createDataSource(container); + dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); + database = new ConfigsDatabaseInstance(dslContext).getAndInitialize(); + flyway = FlywayFactory.create(dataSource, DatabaseConfigPersistenceLoadDataTest.class.getName(), ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); configPersistence = new DatabaseConfigPersistence(database, jsonSecretsProcessor); final
ConfigsDatabaseMigrator configsDatabaseMigrator = - new ConfigsDatabaseMigrator(database, DatabaseConfigPersistenceLoadDataTest.class.getName()); + new ConfigsDatabaseMigrator(database, flyway); final DevDatabaseMigrator devDatabaseMigrator = new DevDatabaseMigrator(configsDatabaseMigrator); MigrationDevHelper.runLastMigration(devDatabaseMigrator); } @AfterAll - public static void tearDown() throws Exception { - database.close(); + public static void tearDown() throws IOException { + dslContext.close(); + if (dataSource instanceof Closeable closeable) { + closeable.close(); + } } @BeforeEach diff --git a/airbyte-container-orchestrator/build.gradle b/airbyte-container-orchestrator/build.gradle index 6356b42c3bee6..ee0da304ba3d7 100644 --- a/airbyte-container-orchestrator/build.gradle +++ b/airbyte-container-orchestrator/build.gradle @@ -20,9 +20,9 @@ dependencies { implementation project(':airbyte-workers') testImplementation 'org.mockito:mockito-inline:2.13.0' - testImplementation 'org.postgresql:postgresql:42.2.18' - testImplementation 'org.testcontainers:testcontainers:1.15.3' - testImplementation 'org.testcontainers:postgresql:1.15.3' + testImplementation libs.postgresql + testImplementation libs.testcontainers + testImplementation libs.testcontainers.postgresql testImplementation project(':airbyte-commons-docker') } diff --git a/airbyte-db/jooq/build.gradle b/airbyte-db/jooq/build.gradle index 48041d1b1acc7..24b41309eedba 100644 --- a/airbyte-db/jooq/build.gradle +++ b/airbyte-db/jooq/build.gradle @@ -4,16 +4,15 @@ plugins { } dependencies { - implementation 'org.jooq:jooq-meta:3.13.4' - implementation 'org.jooq:jooq:3.13.4' - implementation 'org.postgresql:postgresql:42.3.1' - implementation "org.flywaydb:flyway-core:7.14.0" - + implementation libs.jooq.meta + implementation libs.jooq + implementation libs.postgresql + implementation libs.flyway.core implementation project(':airbyte-db:lib') // jOOQ code generation - implementation 'org.jooq:jooq-codegen:3.13.4' - implementation "org.testcontainers:postgresql:1.15.3" + implementation libs.jooq.codegen + implementation libs.testcontainers.postgresql // These are required because gradle might be using lower version of Jna from other // library transitive dependency. Can be removed if we can figure out which library is the cause. // Refer: https://github.com/testcontainers/testcontainers-java/issues/3834#issuecomment-825409079 diff --git a/airbyte-db/lib/build.gradle b/airbyte-db/lib/build.gradle index 3d52778abfcf6..352fce360b96d 100644 --- a/airbyte-db/lib/build.gradle +++ b/airbyte-db/lib/build.gradle @@ -3,24 +3,22 @@ plugins { } dependencies { - api 'org.apache.commons:commons-dbcp2:2.7.0' - api 'com.zaxxer:HikariCP:5.0.1' - api 'org.jooq:jooq-meta:3.13.4' - api 'org.jooq:jooq:3.13.4' - api 'org.postgresql:postgresql:42.2.18' + api libs.hikaricp + api libs.jooq.meta + api libs.jooq + api libs.postgresql implementation project(':airbyte-protocol:models') implementation project(':airbyte-json-validation') implementation project(':airbyte-config:models') - implementation "org.flywaydb:flyway-core:7.14.0" - implementation "org.testcontainers:postgresql:1.15.3" + implementation libs.flyway.core + implementation libs.testcontainers.postgresql // These are required because gradle might be using lower version of Jna from other // library transitive dependency. Can be removed if we can figure out which library is the cause. 
// Refer: https://github.com/testcontainers/testcontainers-java/issues/3834#issuecomment-825409079 implementation 'net.java.dev.jna:jna:5.8.0' implementation 'net.java.dev.jna:jna-platform:5.8.0' - testImplementation project(':airbyte-test-utils') testImplementation 'org.apache.commons:commons-lang3:3.11' diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/Database.java b/airbyte-db/lib/src/main/java/io/airbyte/db/Database.java index 0f779909781da..e620adef10a87 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/Database.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/Database.java @@ -4,48 +4,84 @@ package io.airbyte.db; -import java.io.Closeable; +import io.airbyte.commons.lang.Exceptions; +import java.io.IOException; import java.sql.SQLException; -import javax.sql.DataSource; -import org.jooq.SQLDialect; +import java.util.function.Function; +import lombok.val; +import org.jooq.DSLContext; import org.jooq.impl.DSL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Database object for interacting with a Jooq connection. */ -public class Database implements AutoCloseable { +public class Database { - private final DataSource ds; - private final SQLDialect dialect; + private static final Logger LOGGER = LoggerFactory.getLogger(Database.class); - public Database(final DataSource ds, final SQLDialect dialect) { - this.ds = ds; - this.dialect = dialect; + private static final long DEFAULT_WAIT_MS = 5 * 1000; + + private final DSLContext dslContext; + + public Database(final DSLContext dslContext) { + this.dslContext = dslContext; } public <T> T query(final ContextQueryFunction<T> transform) throws SQLException { - return transform.query(DSL.using(ds, dialect)); + return transform.query(dslContext); } public <T> T transaction(final ContextQueryFunction<T> transform) throws SQLException { - return DSL.using(ds, dialect).transactionResult(configuration -> transform.query(DSL.using(configuration))); + return dslContext.transactionResult(configuration -> transform.query(DSL.using(configuration))); } - public DataSource getDataSource() { - return ds; + public static Database createWithRetry(final DSLContext dslContext, + final Function<Database, Boolean> isDbReady) { + Database database = null; + while (database == null) { + try { + val infinity = Integer.MAX_VALUE; + database = createWithRetryTimeout(dslContext, isDbReady, infinity); + } catch (final IOException e) { + // This should theoretically never happen since we set the timeout to be a very high number. + } + } + + LOGGER.info("Database available!"); + return database; } - @Override - public void close() throws Exception { - // Just a safety in case we are using a datasource implementation that requires closing. - // BasicDataSource from apache does since it also provides a pooling mechanism to reuse connections. + public static Database createWithRetryTimeout(final DSLContext dslContext, + final Function<Database, Boolean> isDbReady, + final long timeoutMs) + throws IOException { + Database database = null; + var totalTime = 0; + while (database == null) { + LOGGER.warn("Waiting for database to become available..."); + if (totalTime >= timeoutMs) { + throw new IOException("Unable to connect to database."); + } - if (ds instanceof AutoCloseable) { - ((AutoCloseable) ds).close(); - } - if (ds instanceof Closeable) { - ((Closeable) ds).close(); + try { + database = new Database(dslContext); + if (!isDbReady.apply(database)) { + LOGGER.info("Database is not ready yet.
Please wait a moment, it might still be initializing..."); + database = null; + Exceptions.toRuntime(() -> Thread.sleep(DEFAULT_WAIT_MS)); + totalTime += DEFAULT_WAIT_MS; + } + } catch (final Exception e) { + // Ignore the exception because this likely means that the database server is still initializing. + LOGGER.warn("Ignoring exception while trying to request database:", e); + database = null; + Exceptions.toRuntime(() -> Thread.sleep(DEFAULT_WAIT_MS)); + totalTime += DEFAULT_WAIT_MS; + } } + return database; } } diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java b/airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java index 2df0b624f0794..63a04ce7f8832 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java @@ -7,6 +7,9 @@ import com.google.common.collect.Maps; import io.airbyte.commons.lang.Exceptions; import io.airbyte.db.bigquery.BigQueryDatabase; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcSourceOperations; @@ -18,8 +21,8 @@ import java.util.Map; import java.util.function.Function; import java.util.function.Supplier; +import javax.sql.DataSource; import lombok.val; -import org.apache.commons.dbcp2.BasicDataSource; import org.jooq.SQLDialect; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -49,7 +52,7 @@ public static Database createPostgresDatabase(final String username, } public static Database createPostgresDatabase(final String username, final String password, final String jdbcConnectionString) { - return createDatabase(username, password, jdbcConnectionString, "org.postgresql.Driver", SQLDialect.POSTGRES); + return createDatabase(username, password, jdbcConnectionString, DatabaseDriver.POSTGRESQL.getDriverClassName(), SQLDialect.POSTGRES); } public static Database createPostgresDatabaseWithRetry(final String username, @@ -93,8 +96,6 @@ public static Database createPostgresDatabaseWithRetryTimeout(final String usern database = createPostgresDatabase(username, password, jdbcConnectionString); if (!isDbReady.apply(database)) { LOGGER.info("Database is not ready yet. 
Please wait a moment, it might still be initializing..."); - database.close(); - database = null; Exceptions.toRuntime(() -> Thread.sleep(DEFAULT_WAIT_MS)); totalTime += DEFAULT_WAIT_MS; @@ -141,9 +142,7 @@ public static Database createDatabase(final String username, final String jdbcConnectionString, final String driverClassName, final SQLDialect dialect) { - final BasicDataSource connectionPool = createBasicDataSource(username, password, jdbcConnectionString, driverClassName); - - return new Database(connectionPool, dialect); + return new Database(DSLContextFactory.create(username, password, driverClassName, jdbcConnectionString, dialect)); } public static Database createDatabase(final String username, @@ -152,10 +151,7 @@ public static Database createDatabase(final String username, final String driverClassName, final SQLDialect dialect, final Map<String, String> connectionProperties) { - final BasicDataSource connectionPool = - createBasicDataSource(username, password, jdbcConnectionString, driverClassName, connectionProperties); - - return new Database(connectionPool, dialect); + return new Database(DSLContextFactory.create(username, password, driverClassName, jdbcConnectionString, dialect, connectionProperties)); } public static JdbcDatabase createJdbcDatabase(final String username, @@ -170,7 +166,7 @@ public static JdbcDatabase createJdbcDatabase(final String username, final String jdbcConnectionString, final String driverClassName, final JdbcSourceOperations sourceOperations) { - final BasicDataSource connectionPool = createBasicDataSource(username, password, jdbcConnectionString, driverClassName); + final DataSource connectionPool = createBasicDataSource(username, password, jdbcConnectionString, driverClassName); return new DefaultJdbcDatabase(connectionPool, sourceOperations); } @@ -190,7 +186,7 @@ public static JdbcDatabase createJdbcDatabase(final String username, final String driverClassName, final Map<String, String> connectionProperties, final JdbcCompatibleSourceOperations<?> sourceOperations) { - final BasicDataSource connectionPool = + final DataSource connectionPool = createBasicDataSource(username, password, jdbcConnectionString, driverClassName, connectionProperties); return new DefaultJdbcDatabase(connectionPool, sourceOperations); @@ -203,34 +199,26 @@ public static JdbcDatabase createStreamingJdbcDatabase(final String username, final Supplier<JdbcStreamingQueryConfig> streamingQueryConfigProvider, final Map<String, String> connectionProperties, final JdbcCompatibleSourceOperations<?> sourceOperations) { - final BasicDataSource connectionPool = + final DataSource connectionPool = createBasicDataSource(username, password, jdbcConnectionString, driverClassName, connectionProperties); return new StreamingJdbcDatabase(connectionPool, sourceOperations, streamingQueryConfigProvider); } - private static BasicDataSource createBasicDataSource(final String username, - final String password, - final String jdbcConnectionString, - final String driverClassName) { + private static DataSource createBasicDataSource(final String username, + final String password, + final String jdbcConnectionString, + final String driverClassName) { return createBasicDataSource(username, password, jdbcConnectionString, driverClassName, Maps.newHashMap()); } - public static BasicDataSource createBasicDataSource(final String username, - final String password, - final String jdbcConnectionString, - final String driverClassName, - final Map<String, String> connectionProperties) { - final BasicDataSource connectionPool = new BasicDataSource(); - connectionPool.setDriverClassName(driverClassName); - connectionPool.setUsername(username); - connectionPool.setPassword(password); - connectionPool.setInitialSize(0); - connectionPool.setMaxTotal(5); - connectionPool.setUrl(jdbcConnectionString); - connectionProperties.forEach(connectionPool::addConnectionProperty); - return connectionPool; + public static DataSource createBasicDataSource(final String username, + final String password, + final String jdbcConnectionString, + final String driverClassName, + final Map<String, String> connectionProperties) { + return DataSourceFactory.create(username, password, driverClassName, jdbcConnectionString, connectionProperties); } public static BigQueryDatabase createBigQueryDatabase(final String projectId, final String jsonCreds) { diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/ExceptionWrappingDatabase.java b/airbyte-db/lib/src/main/java/io/airbyte/db/ExceptionWrappingDatabase.java index 2b3466073f00f..02f6188a55225 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/ExceptionWrappingDatabase.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/ExceptionWrappingDatabase.java @@ -10,7 +10,7 @@ /** * Wraps a {@link Database} object and throwing IOExceptions instead of SQLExceptions. */ -public class ExceptionWrappingDatabase implements AutoCloseable { +public class ExceptionWrappingDatabase { private final Database database; @@ -34,9 +34,4 @@ public <T> T transaction(final ContextQueryFunction<T> transform) throws IOExcep } } - @Override - public void close() throws Exception { - database.close(); - } - } diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/bigquery/TempBigQueryJoolDatabaseImpl.java b/airbyte-db/lib/src/main/java/io/airbyte/db/bigquery/TempBigQueryJoolDatabaseImpl.java index ec01006410744..93d5472172cc4 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/bigquery/TempBigQueryJoolDatabaseImpl.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/bigquery/TempBigQueryJoolDatabaseImpl.java @@ -6,8 +6,8 @@ import io.airbyte.db.ContextQueryFunction; import io.airbyte.db.Database; -import io.airbyte.db.Databases; import java.sql.SQLException; +import javax.annotation.Nullable; import org.jooq.Record; import org.jooq.Result; import org.jooq.SQLDialect; @@ -22,8 +22,8 @@ public class TempBigQueryJoolDatabaseImpl extends Database { private final BigQueryDatabase realDatabase; public TempBigQueryJoolDatabaseImpl(final String projectId, final String jsonCreds) { - super(null, null); - realDatabase = Databases.createBigQueryDatabase(projectId, jsonCreds); + super(null); + realDatabase = createBigQueryDatabase(projectId, jsonCreds); } @Override @@ -36,11 +36,6 @@ public <T> T transaction(final ContextQueryFunction<T> transform) throws SQLExce return transform.query(new FakeDefaultDSLContext(realDatabase)); } - @Override - public void close() throws Exception { - realDatabase.close(); - } - public BigQueryDatabase getRealDatabase() { return realDatabase; } @@ -55,6 +50,7 @@ public FakeDefaultDSLContext(final BigQueryDatabase database) { } @Override + @Nullable public Result<Record> fetch(final String sql) throws DataAccessException { try { database.execute(sql); @@ -66,4 +62,8 @@ public Result<Record> fetch(final String sql) throws DataAccessException { } + public static BigQueryDatabase createBigQueryDatabase(final String projectId, final String jsonCreds) { + return new BigQueryDatabase(projectId, jsonCreds); + } + } diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DSLContextFactory.java b/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DSLContextFactory.java index eba32e7cb6201..929607cd4b94c 100644 ---
a/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DSLContextFactory.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DSLContextFactory.java @@ -4,6 +4,7 @@ package io.airbyte.db.factory; +import java.util.Map; import javax.sql.DataSource; import org.jooq.DSLContext; import org.jooq.SQLDialect; @@ -29,4 +30,42 @@ public static DSLContext create(final DataSource dataSource, final SQLDialect di return DSL.using(dataSource, dialect); } + /** + * Constructs a configured {@link DSLContext} instance using the provided configuration. + * + * @param username The username of the database user. + * @param password The password of the database user. + * @param driverClassName The fully qualified name of the JDBC driver class. + * @param jdbcConnectionString The JDBC connection string. + * @param dialect The SQL dialect to use with objects created from this context. + * @return The configured {@link DSLContext}. + */ + public static DSLContext create(final String username, + final String password, + final String driverClassName, + final String jdbcConnectionString, + final SQLDialect dialect) { + return DSL.using(DataSourceFactory.create(username, password, driverClassName, jdbcConnectionString), dialect); + } + + /** + * Constructs a configured {@link DSLContext} instance using the provided configuration. + * + * @param username The username of the database user. + * @param password The password of the database user. + * @param driverClassName The fully qualified name of the JDBC driver class. + * @param jdbcConnectionString The JDBC connection string. + * @param dialect The SQL dialect to use with objects created from this context. + * @param connectionProperties Additional configuration properties for the underlying driver. + * @return The configured {@link DSLContext}. 
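+   *
+   *         Illustrative usage (the credentials, URL and property values below are placeholders,
+   *         not part of this change):
+   *
+   *         <pre>
+   *         {@code
+   *         final DSLContext ctx = DSLContextFactory.create("user", "pass",
+   *             DatabaseDriver.POSTGRESQL.getDriverClassName(),
+   *             "jdbc:postgresql://localhost:5432/airbyte",
+   *             SQLDialect.POSTGRES, Map.of("ssl", "false"));
+   *         }
+   *         </pre>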
+   */
+  public static DSLContext create(final String username,
+                                  final String password,
+                                  final String driverClassName,
+                                  final String jdbcConnectionString,
+                                  final SQLDialect dialect,
+                                  final Map<String, String> connectionProperties) {
+    return DSL.using(DataSourceFactory.create(username, password, driverClassName, jdbcConnectionString, connectionProperties), dialect);
+  }
+
 }
diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DataSourceFactory.java b/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DataSourceFactory.java
index 31c659a548cfb..1a861fef8d13a 100644
--- a/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DataSourceFactory.java
+++ b/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DataSourceFactory.java
@@ -4,6 +4,7 @@
 
 package io.airbyte.db.factory;
 
+import com.google.common.base.Preconditions;
 import com.zaxxer.hikari.HikariConfig;
 import com.zaxxer.hikari.HikariDataSource;
 import java.util.Map;
@@ -151,17 +152,9 @@ public static DataSource createPostgres(final String username,
   */
  private static class DataSourceBuilder {
 
-    private static final Map<String, String> JDBC_URL_FORMATS = Map.of("org.postgresql.Driver", "jdbc:postgresql://%s:%d/%s",
-        "com.amazon.redshift.jdbc.Driver", "jdbc:redshift://%s:%d/%s",
-        "com.mysql.cj.jdbc.Driver", "jdbc:mysql://%s:%d/%s",
-        "com.microsoft.sqlserver.jdbc.SQLServerDriver", "jdbc:sqlserver://%s:%d/%s",
-        "oracle.jdbc.OracleDriver", "jdbc:oracle:thin:@%s:%d:%s",
-        "ru.yandex.clickhouse.ClickHouseDriver", "jdbc:ch://%s:%d/%s",
-        "org.mariadb.jdbc.Driver", "jdbc:mariadb://%s:%d/%s");
-
    private Map<String, String> connectionProperties = Map.of();
    private String database;
-    private String driverClassName = "org.postgresql.Driver";
+    private String driverClassName;
    private String host;
    private String jdbcUrl;
    private Integer maximumPoolSize = 5;
@@ -231,19 +224,29 @@ public DataSourceBuilder withUsername(final String username) {
   }
 
   public DataSource build() {
+    final DatabaseDriver databaseDriver = DatabaseDriver.findByDriverClassName(driverClassName);
+
+    Preconditions.checkNotNull(databaseDriver, "Unknown or blank driver class name: '" + driverClassName + "'.");
+
     final HikariConfig config = new HikariConfig();
-    config.setDriverClassName(driverClassName);
-    config.setJdbcUrl(jdbcUrl != null ? jdbcUrl : String.format(JDBC_URL_FORMATS.getOrDefault(driverClassName, ""), host, port, database));
+
+    config.setDriverClassName(databaseDriver.getDriverClassName());
+    config.setJdbcUrl(jdbcUrl != null ? jdbcUrl : String.format(databaseDriver.getUrlFormatString(), host, port, database));
     config.setMaximumPoolSize(maximumPoolSize);
     config.setMinimumIdle(minimumPoolSize);
     config.setPassword(password);
     config.setUsername(username);
+
+    /*
+     * Disable to prevent failing on startup. Applications may start prior to the database container
+     * being available. To avoid failing to create the connection pool, disable the fail check. This
+     * will preserve existing behavior that tests for the connection on first use, not on creation.
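+     * (HikariCP treats any negative value of initializationFailTimeout as "skip the initial
+     * connection attempt entirely", so Integer.MIN_VALUE below simply disables fail-fast.)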
+ */ + config.setInitializationFailTimeout(Integer.MIN_VALUE); + connectionProperties.forEach(config::addDataSourceProperty); - final HikariDataSource dataSource = new HikariDataSource(config); - dataSource.validate(); - return dataSource; + return new HikariDataSource(config); } } diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DatabaseDriver.java b/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DatabaseDriver.java new file mode 100644 index 0000000000000..93c953cf62b44 --- /dev/null +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DatabaseDriver.java @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.factory; + +/** + * Collection of JDBC driver class names and the associated JDBC URL format string. + */ +public enum DatabaseDriver { + + CLICKHOUSE("ru.yandex.clickhouse.ClickHouseDriver", "jdbc:clickhouse://%s:%d/%s"), + DB2("com.ibm.db2.jcc.DB2Driver", "jdbc:db2://%s:%d/%s"), + MARIADB("org.mariadb.jdbc.Driver", "jdbc:mariadb://%s:%d/%s"), + MSSQLSERVER("com.microsoft.sqlserver.jdbc.SQLServerDriver", "jdbc:sqlserver://%s:%d/%s"), + MYSQL("com.mysql.cj.jdbc.Driver", "jdbc:mysql://%s:%d/%s"), + ORACLE("oracle.jdbc.OracleDriver", "jdbc:oracle:thin:@%s:%d/%s"), + POSTGRESQL("org.postgresql.Driver", "jdbc:postgresql://%s:%d/%s"), + REDSHIFT("com.amazon.redshift.jdbc.Driver", "jdbc:redshift://%s:%d/%s"), + SNOWFLAKE("net.snowflake.client.jdbc.SnowflakeDriver", "jdbc:snowflake://%s/"); + + private final String driverClassName; + private final String urlFormatString; + + DatabaseDriver(final String driverClassName, final String urlFormatString) { + this.driverClassName = driverClassName; + this.urlFormatString = urlFormatString; + } + + public String getDriverClassName() { + return driverClassName; + } + + public String getUrlFormatString() { + return urlFormatString; + } + + /** + * Finds the {@link DatabaseDriver} enumerated value that matches the provided driver class name. + * + * @param driverClassName The driver class name. + * @return The matching {@link DatabaseDriver} enumerated value or {@code null} if no match is + * found. + */ + public static DatabaseDriver findByDriverClassName(final String driverClassName) { + DatabaseDriver selected = null; + + for (final DatabaseDriver candidate : values()) { + if (candidate.getDriverClassName().equalsIgnoreCase(driverClassName)) { + selected = candidate; + break; + } + } + + return selected; + } + +} diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/factory/FlywayFactory.java b/airbyte-db/lib/src/main/java/io/airbyte/db/factory/FlywayFactory.java index 0e5526745fd94..2e8bbcfdd1265 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/factory/FlywayFactory.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/factory/FlywayFactory.java @@ -38,16 +38,46 @@ public static Flyway create(final DataSource dataSource, final String installedBy, final String dbIdentifier, final String... migrationFileLocations) { + return create(dataSource, + installedBy, + dbIdentifier, + BASELINE_VERSION, + BASELINE_DESCRIPTION, + BASELINE_ON_MIGRATION, + migrationFileLocations); + } + + /** + * Constructs a configured {@link Flyway} instance using the provided configuration. + * + * @param dataSource The {@link DataSource} used to connect to the database. + * @param installedBy The name of the module performing the migration. + * @param dbIdentifier The name of the database to be migrated. This is used to name the table to + * hold the migration history for the database. 
+   * @param baselineVersion The version to tag an existing schema with when executing baseline.
+   * @param baselineDescription The description to tag an existing schema with when executing
+   *        baseline.
+   * @param baselineOnMigrate Whether to automatically call baseline when migrate is executed against
+   *        a non-empty schema with no schema history table.
+   * @param migrationFileLocations The array of migration files to be used.
+   * @return The configured {@link Flyway} instance.
+   */
+  public static Flyway create(final DataSource dataSource,
+                              final String installedBy,
+                              final String dbIdentifier,
+                              final String baselineVersion,
+                              final String baselineDescription,
+                              final boolean baselineOnMigrate,
+                              final String... migrationFileLocations) {
     return Flyway.configure()
         .dataSource(dataSource)
-        .baselineVersion(BASELINE_VERSION)
-        .baselineDescription(BASELINE_DESCRIPTION)
-        .baselineOnMigrate(BASELINE_ON_MIGRATION)
+        .baselineVersion(baselineVersion)
+        .baselineDescription(baselineDescription)
+        .baselineOnMigrate(baselineOnMigrate)
         .installedBy(installedBy)
         .table(String.format(MIGRATION_TABLE_FORMAT, dbIdentifier))
         .locations(migrationFileLocations)
         .load();
-  }
 
 }
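For orientation, a minimal sketch of a call site for the baseline-aware overload above. The DataSource variable and the installed-by name are illustrative only; the version, description and location strings mirror constants that appear elsewhere in this patch:

    // Assumes a DataSource built beforehand, e.g. via DataSourceFactory.create(...).
    final Flyway flyway = FlywayFactory.create(dataSource, "ExampleRunner",
        "configs", "0.29.0.001", "Baseline from file-based migration v1", true,
        "classpath:io/airbyte/db/instance/configs/migrations");
    flyway.migrate();
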
diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/BaseDatabaseInstance.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/BaseDatabaseInstance.java
index 3cc2c21333ab1..b7ba44fbe5a65 100644
--- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/BaseDatabaseInstance.java
+++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/BaseDatabaseInstance.java
@@ -7,7 +7,6 @@
 import static org.jooq.impl.DSL.select;
 
 import io.airbyte.db.Database;
-import io.airbyte.db.Databases;
 import io.airbyte.db.ExceptionWrappingDatabase;
 import java.io.IOException;
 import java.util.Set;
@@ -24,45 +23,37 @@ public abstract class BaseDatabaseInstance implements DatabaseInstance {
 
   private static final Logger LOGGER = LoggerFactory.getLogger(BaseDatabaseInstance.class);
 
-  protected final String username;
-  protected final String password;
-  protected final String connectionString;
-  protected final String initialSchema;
+  protected final DSLContext dslContext;
   protected final String databaseName;
   protected final Set<String> initialExpectedTables;
+  protected final String initialSchema;
   protected final Function<Database, Boolean> isDatabaseReady;
 
   /**
-   * @param connectionString in the format of
-   *        jdbc:postgresql://${DATABASE_HOST}:${DATABASE_PORT/${DATABASE_DB}
+   * @param dslContext The configured {@link DSLContext}.
    * @param databaseName this name is only for logging purpose; it may not be the actual database name
    *        in the server
    * @param initialSchema the initial database structure.
+   * @param initialExpectedTables The set of tables that should be present in order to consider the
+   *        database ready for use.
    * @param isDatabaseReady a function to check if the database has been initialized and ready for
    *        consumption
    */
-  protected BaseDatabaseInstance(final String username,
-                                 final String password,
-                                 final String connectionString,
-                                 final String initialSchema,
+  protected BaseDatabaseInstance(final DSLContext dslContext,
                                  final String databaseName,
+                                 final String initialSchema,
                                  final Set<String> initialExpectedTables,
                                  final Function<Database, Boolean> isDatabaseReady) {
-    this.username = username;
-    this.password = password;
-    this.connectionString = connectionString;
-    this.initialSchema = initialSchema;
+    this.dslContext = dslContext;
     this.databaseName = databaseName;
+    this.initialSchema = initialSchema;
     this.initialExpectedTables = initialExpectedTables;
     this.isDatabaseReady = isDatabaseReady;
   }
 
   @Override
   public boolean isInitialized() throws IOException {
-    final Database database = Databases.createPostgresDatabaseWithRetryTimeout(
-        username,
-        password,
-        connectionString,
+    final Database database = Database.createWithRetryTimeout(dslContext,
         isDatabaseConnected(databaseName),
         DEFAULT_CONNECTION_TIMEOUT_MS);
     return new ExceptionWrappingDatabase(database).transaction(ctx -> initialExpectedTables.stream().allMatch(tableName -> hasTable(ctx, tableName)));
@@ -72,11 +63,7 @@ public boolean isInitialized() throws IOException {
   public Database getInitialized() {
     // When we don't need to setup the database, it means the database is initialized
     // somewhere else, and it is considered ready only when data has been loaded into it.
-    return Databases.createPostgresDatabaseWithRetry(
-        username,
-        password,
-        connectionString,
-        isDatabaseReady);
+    return Database.createWithRetry(dslContext, isDatabaseReady);
   }
 
   @Override
@@ -84,11 +71,7 @@ public Database getAndInitialize() throws IOException {
     // When we need to setup the database, it means the database will be initialized after
     // we connect to the database. So the database itself is considered ready as long as
     // the connection is alive.
-    final Database database = Databases.createPostgresDatabaseWithRetry(
-        username,
-        password,
-        connectionString,
-        isDatabaseConnected(databaseName));
+    final Database database = Database.createWithRetry(dslContext, isDatabaseConnected(databaseName));
     new ExceptionWrappingDatabase(database).transaction(ctx -> {
       final boolean hasTables = initialExpectedTables.stream().allMatch(tableName -> hasTable(ctx, tableName));
diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/FlywayDatabaseMigrator.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/FlywayDatabaseMigrator.java
index 3ee251f2bd4cf..3890a4142467c 100644
--- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/FlywayDatabaseMigrator.java
+++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/FlywayDatabaseMigrator.java
@@ -14,7 +14,6 @@
 import org.flywaydb.core.Flyway;
 import org.flywaydb.core.api.MigrationInfo;
 import org.flywaydb.core.api.MigrationInfoService;
-import org.flywaydb.core.api.configuration.FluentConfiguration;
 import org.flywaydb.core.api.output.BaselineResult;
 import org.flywaydb.core.api.output.MigrateResult;
 import org.slf4j.Logger;
@@ -23,49 +22,15 @@ public class FlywayDatabaseMigrator implements DatabaseMigrator {
 
   private static final Logger LOGGER = LoggerFactory.getLogger(FlywayDatabaseMigrator.class);
 
-  // Constants for Flyway baseline.
See here for details: - // https://flywaydb.org/documentation/command/baseline - private static final String BASELINE_VERSION = "0.29.0.001"; - private static final String BASELINE_DESCRIPTION = "Baseline from file-based migration v1"; - private static final boolean BASELINE_ON_MIGRATION = true; private final Database database; private final Flyway flyway; - /** - * @param dbIdentifier A name to identify the database. Preferably one word. This identifier will be - * used to construct the migration history table name. For example, if the identifier is - * "imports", the history table name will be "airbyte_imports_migrations". - * @param migrationFileLocations Example: "classpath:db/migration". See: - * https://flywaydb.org/documentation/concepts/migrations#discovery-1 - */ - protected FlywayDatabaseMigrator(final Database database, - final String dbIdentifier, - final String migrationRunner, - final String migrationFileLocations) { - this(database, getConfiguration(database, dbIdentifier, migrationRunner, migrationFileLocations).load()); - } - - @VisibleForTesting public FlywayDatabaseMigrator(final Database database, final Flyway flyway) { this.database = database; this.flyway = flyway; } - private static FluentConfiguration getConfiguration(final Database database, - final String dbIdentifier, - final String migrationRunner, - final String migrationFileLocations) { - return Flyway.configure() - .dataSource(database.getDataSource()) - .baselineVersion(BASELINE_VERSION) - .baselineDescription(BASELINE_DESCRIPTION) - .baselineOnMigrate(BASELINE_ON_MIGRATION) - .installedBy(migrationRunner) - .table(String.format("airbyte_%s_migrations", dbIdentifier)) - .locations(migrationFileLocations); - } - @Override public MigrateResult migrate() { final MigrateResult result = flyway.migrate(); diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/FlywayMigrationDatabase.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/FlywayMigrationDatabase.java index 445d9fcbb14e8..11d529eff2522 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/FlywayMigrationDatabase.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/FlywayMigrationDatabase.java @@ -5,8 +5,13 @@ package io.airbyte.db.instance; import io.airbyte.db.Database; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.FlywayFactory; import java.io.IOException; import java.sql.Connection; +import javax.sql.DataSource; +import org.flywaydb.core.Flyway; import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.jooq.impl.DSL; @@ -32,9 +37,15 @@ public abstract class FlywayMigrationDatabase extends PostgresDatabase { private Connection connection; - protected abstract Database getAndInitializeDatabase(String username, String password, String connectionString) throws IOException; + protected abstract Database getAndInitializeDatabase(DSLContext dslContext) throws IOException; - protected abstract DatabaseMigrator getDatabaseMigrator(Database database); + protected abstract DatabaseMigrator getDatabaseMigrator(Database database, Flyway flyway); + + protected abstract String getInstalledBy(); + + protected abstract String getDbIdentifier(); + + protected abstract String[] getMigrationFileLocations(); @Override protected DSLContext create0() { @@ -64,11 +75,15 @@ private void createInternalConnection() throws Exception { .withPassword("jooq_generator"); container.start(); - final Database database = 
getAndInitializeDatabase(container.getUsername(), container.getPassword(), container.getJdbcUrl()); - final DatabaseMigrator migrator = getDatabaseMigrator(database); + final DataSource dataSource = + DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); + final DSLContext dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); + final Flyway flyway = FlywayFactory.create(dataSource, getInstalledBy(), getDbIdentifier(), getMigrationFileLocations()); + final Database database = getAndInitializeDatabase(dslContext); + final DatabaseMigrator migrator = getDatabaseMigrator(database, flyway); migrator.migrate(); - connection = database.getDataSource().getConnection(); + connection = dataSource.getConnection(); setConnection(connection); } diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseInstance.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseInstance.java index 7ecee82492bd8..e1ff55f2935c4 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseInstance.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseInstance.java @@ -6,7 +6,6 @@ import io.airbyte.commons.resources.MoreResources; import io.airbyte.db.Database; -import io.airbyte.db.Databases; import io.airbyte.db.ExceptionWrappingDatabase; import io.airbyte.db.instance.BaseDatabaseInstance; import io.airbyte.db.instance.DatabaseInstance; @@ -14,6 +13,7 @@ import java.util.Collections; import java.util.Set; import java.util.function.Function; +import org.jooq.DSLContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -38,19 +38,15 @@ public class ConfigsDatabaseInstance extends BaseDatabaseInstance implements Dat private Database database; - public ConfigsDatabaseInstance(final String username, final String password, final String connectionString) throws IOException { - super(username, password, connectionString, MoreResources.readResource(SCHEMA_PATH), DATABASE_LOGGING_NAME, INITIAL_EXPECTED_TABLES, + public ConfigsDatabaseInstance(final DSLContext dslContext) throws IOException { + super(dslContext, DATABASE_LOGGING_NAME, MoreResources.readResource(SCHEMA_PATH), INITIAL_EXPECTED_TABLES, IS_CONFIGS_DATABASE_READY); } @Override public boolean isInitialized() throws IOException { if (database == null) { - database = Databases.createPostgresDatabaseWithRetry( - username, - password, - connectionString, - isDatabaseConnected(databaseName)); + database = Database.createWithRetry(dslContext, isDatabaseConnected(databaseName)); } return new ExceptionWrappingDatabase(database).transaction(ctx -> { @@ -73,11 +69,7 @@ public Database getAndInitialize() throws IOException { // we connect to the database. So the database itself is considered ready as long as // the connection is alive. 
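    // The DSLContext injected at construction is reused for this lazily created, retried connection.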
if (database == null) { - database = Databases.createPostgresDatabaseWithRetry( - username, - password, - connectionString, - isDatabaseConnected(databaseName)); + database = Database.createWithRetry(dslContext, isDatabaseConnected(databaseName)); } new ExceptionWrappingDatabase(database).transaction(ctx -> { diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrationDevCenter.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrationDevCenter.java index 2c52bf7dcea16..8fc570edbdd4f 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrationDevCenter.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrationDevCenter.java @@ -5,10 +5,13 @@ package io.airbyte.db.instance.configs; import io.airbyte.db.Database; +import io.airbyte.db.factory.FlywayFactory; import io.airbyte.db.instance.FlywayDatabaseMigrator; import io.airbyte.db.instance.development.MigrationDevCenter; import java.io.IOException; -import org.testcontainers.containers.PostgreSQLContainer; +import javax.sql.DataSource; +import org.flywaydb.core.Flyway; +import org.jooq.DSLContext; /** * Helper class for migration development. See README for details. @@ -20,13 +23,19 @@ public ConfigsDatabaseMigrationDevCenter() { } @Override - protected FlywayDatabaseMigrator getMigrator(final Database database) { - return new ConfigsDatabaseMigrator(database, ConfigsDatabaseMigrationDevCenter.class.getSimpleName()); + protected FlywayDatabaseMigrator getMigrator(final Database database, final Flyway flyway) { + return new ConfigsDatabaseMigrator(database, flyway); } @Override - protected Database getDatabase(final PostgreSQLContainer container) throws IOException { - return new ConfigsDatabaseInstance(container.getUsername(), container.getPassword(), container.getJdbcUrl()).getAndInitialize(); + protected Database getDatabase(final DSLContext dslContext) throws IOException { + return new ConfigsDatabaseInstance(dslContext).getAndInitialize(); + } + + @Override + protected Flyway getFlyway(final DataSource dataSource) { + return FlywayFactory.create(dataSource, getClass().getSimpleName(), ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); } } diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrator.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrator.java index f08199ec3d195..213758270cac2 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrator.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigrator.java @@ -6,14 +6,15 @@ import io.airbyte.db.Database; import io.airbyte.db.instance.FlywayDatabaseMigrator; +import org.flywaydb.core.Flyway; public class ConfigsDatabaseMigrator extends FlywayDatabaseMigrator { public static final String DB_IDENTIFIER = "configs"; public static final String MIGRATION_FILE_LOCATION = "classpath:io/airbyte/db/instance/configs/migrations"; - public ConfigsDatabaseMigrator(final Database database, final String migrationRunner) { - super(database, DB_IDENTIFIER, migrationRunner, MIGRATION_FILE_LOCATION); + public ConfigsDatabaseMigrator(final Database database, final Flyway flyway) { + super(database, flyway); } } diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseTestProvider.java 
b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseTestProvider.java index df2b145ff3535..4293e815cb25f 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseTestProvider.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsDatabaseTestProvider.java @@ -15,29 +15,28 @@ import java.io.IOException; import java.time.OffsetDateTime; import java.util.UUID; +import org.flywaydb.core.Flyway; +import org.jooq.DSLContext; import org.jooq.JSONB; public class ConfigsDatabaseTestProvider implements TestDatabaseProvider { - private final String user; - private final String password; - private final String jdbcUrl; + private final DSLContext dslContext; + private final Flyway flyway; - public ConfigsDatabaseTestProvider(final String user, final String password, final String jdbcUrl) { - this.user = user; - this.password = password; - this.jdbcUrl = jdbcUrl; + public ConfigsDatabaseTestProvider(final DSLContext dslContext, final Flyway flyway) { + this.dslContext = dslContext; + this.flyway = flyway; } @Override public Database create(final boolean runMigration) throws IOException { - final Database database = new ConfigsDatabaseInstance(user, password, jdbcUrl) + final Database database = new ConfigsDatabaseInstance(dslContext) .getAndInitialize(); if (runMigration) { final DatabaseMigrator migrator = new ConfigsDatabaseMigrator( - database, - ConfigsDatabaseTestProvider.class.getSimpleName()); + database, flyway); migrator.createBaseline(); migrator.migrate(); } else { diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsFlywayMigrationDatabase.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsFlywayMigrationDatabase.java index e9662e75748cb..2550e1a2ec7b0 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsFlywayMigrationDatabase.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/ConfigsFlywayMigrationDatabase.java @@ -8,6 +8,8 @@ import io.airbyte.db.instance.DatabaseMigrator; import io.airbyte.db.instance.FlywayMigrationDatabase; import java.io.IOException; +import org.flywaydb.core.Flyway; +import org.jooq.DSLContext; /** * Configs database for jOOQ code generation. 
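 * Supplies the Flyway metadata (installed-by name, database identifier, migration file locations) consumed by the base class.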
@@ -15,13 +17,28 @@ public class ConfigsFlywayMigrationDatabase extends FlywayMigrationDatabase { @Override - protected Database getAndInitializeDatabase(final String username, final String password, final String connectionString) throws IOException { - return new ConfigsDatabaseInstance(username, password, connectionString).getAndInitialize(); + protected Database getAndInitializeDatabase(final DSLContext dslContext) throws IOException { + return new ConfigsDatabaseInstance(dslContext).getAndInitialize(); } @Override - protected DatabaseMigrator getDatabaseMigrator(final Database database) { - return new ConfigsDatabaseMigrator(database, ConfigsFlywayMigrationDatabase.class.getSimpleName()); + protected DatabaseMigrator getDatabaseMigrator(final Database database, final Flyway flyway) { + return new ConfigsDatabaseMigrator(database, flyway); + } + + @Override + protected String getInstalledBy() { + return ConfigsFlywayMigrationDatabase.class.getSimpleName(); + } + + @Override + protected String getDbIdentifier() { + return ConfigsDatabaseMigrator.DB_IDENTIFIER; + } + + @Override + protected String[] getMigrationFileLocations() { + return new String[] {ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION}; } } diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state.java index ce1e8f1debe69..2844b4b0c2629 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state.java @@ -13,6 +13,8 @@ import io.airbyte.config.StandardSyncState; import io.airbyte.config.State; import io.airbyte.db.Database; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.instance.jobs.JobsDatabaseInstance; import java.io.IOException; import java.sql.SQLException; @@ -26,6 +28,7 @@ import org.jooq.DSLContext; import org.jooq.Field; import org.jooq.JSONB; +import org.jooq.SQLDialect; import org.jooq.Table; import org.jooq.impl.DSL; import org.jooq.impl.SQLDataType; @@ -115,7 +118,9 @@ static Optional getJobsDatabase(final String databaseUser, final Strin } // If the environment variables exist, it means the migration is run in production. // Connect to the official job database. 
- final Database jobsDatabase = new JobsDatabaseInstance(databaseUser, databasePassword, databaseUrl).getInitialized(); + final DSLContext dslContext = + DSLContextFactory.create(databaseUser, databasePassword, DatabaseDriver.POSTGRESQL.getDriverClassName(), databaseUrl, SQLDialect.POSTGRES); + final Database jobsDatabase = new JobsDatabaseInstance(dslContext).getInitialized(); LOGGER.info("[{}] Connected to jobs database: {}", MIGRATION_NAME, databaseUrl); return Optional.of(jobsDatabase); } catch (final IllegalArgumentException e) { diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/development/MigrationDevCenter.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/development/MigrationDevCenter.java index 58f26c184d35e..379ab7a56f0ec 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/development/MigrationDevCenter.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/development/MigrationDevCenter.java @@ -5,10 +5,16 @@ package io.airbyte.db.instance.development; import io.airbyte.db.Database; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; import io.airbyte.db.instance.FlywayDatabaseMigrator; import io.airbyte.db.instance.configs.ConfigsDatabaseMigrationDevCenter; import io.airbyte.db.instance.jobs.JobsDatabaseMigrationDevCenter; import java.io.IOException; +import javax.sql.DataSource; +import org.flywaydb.core.Flyway; +import org.jooq.DSLContext; +import org.jooq.SQLDialect; import org.testcontainers.containers.PostgreSQLContainer; /** @@ -44,37 +50,57 @@ private static PostgreSQLContainer createContainer() { return container; } - protected abstract FlywayDatabaseMigrator getMigrator(Database database); + protected abstract FlywayDatabaseMigrator getMigrator(Database database, Flyway flyway); - protected abstract Database getDatabase(PostgreSQLContainer container) throws IOException; + protected abstract Database getDatabase(DSLContext dslContext) throws IOException; + + protected abstract Flyway getFlyway(DataSource dataSource); private void createMigration() { - try (final PostgreSQLContainer container = createContainer(); final Database database = getDatabase(container)) { - final FlywayDatabaseMigrator migrator = getMigrator(database); - MigrationDevHelper.createNextMigrationFile(dbIdentifier, migrator); + try (final PostgreSQLContainer container = createContainer()) { + final DataSource dataSource = + DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); + try (final DSLContext dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES)) { + final Flyway flyway = getFlyway(dataSource); + final Database database = getDatabase(dslContext); + final FlywayDatabaseMigrator migrator = getMigrator(database, flyway); + MigrationDevHelper.createNextMigrationFile(dbIdentifier, migrator); + } } catch (final Exception e) { throw new RuntimeException(e); } } private void runLastMigration() { - try (final PostgreSQLContainer container = createContainer(); final Database database = getDatabase(container)) { - final FlywayDatabaseMigrator fullMigrator = getMigrator(database); - final DevDatabaseMigrator devDatabaseMigrator = new DevDatabaseMigrator(fullMigrator); - MigrationDevHelper.runLastMigration(devDatabaseMigrator); - final String schema = fullMigrator.dumpSchema(); - MigrationDevHelper.dumpSchema(schema, schemaDumpFile, false); + try (final PostgreSQLContainer container = createContainer()) { + final 
DataSource dataSource = + DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); + try (final DSLContext dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES)) { + final Flyway flyway = getFlyway(dataSource); + final Database database = getDatabase(dslContext); + final FlywayDatabaseMigrator fullMigrator = getMigrator(database, flyway); + final DevDatabaseMigrator devDatabaseMigrator = new DevDatabaseMigrator(fullMigrator); + MigrationDevHelper.runLastMigration(devDatabaseMigrator); + final String schema = fullMigrator.dumpSchema(); + MigrationDevHelper.dumpSchema(schema, schemaDumpFile, false); + } } catch (final Exception e) { throw new RuntimeException(e); } } private void dumpSchema() { - try (final PostgreSQLContainer container = createContainer(); final Database database = getDatabase(container)) { - final FlywayDatabaseMigrator migrator = getMigrator(database); - migrator.migrate(); - final String schema = migrator.dumpSchema(); - MigrationDevHelper.dumpSchema(schema, schemaDumpFile, true); + try (final PostgreSQLContainer container = createContainer()) { + final DataSource dataSource = + DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); + try (final DSLContext dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES)) { + final Flyway flyway = getFlyway(dataSource); + final Database database = getDatabase(dslContext); + final FlywayDatabaseMigrator migrator = getMigrator(database, flyway); + migrator.migrate(); + final String schema = migrator.dumpSchema(); + MigrationDevHelper.dumpSchema(schema, schemaDumpFile, true); + } } catch (final Exception e) { throw new RuntimeException(e); } diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseInstance.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseInstance.java index 6464b18b7e078..8d378e07f8445 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseInstance.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseInstance.java @@ -11,6 +11,7 @@ import io.airbyte.db.instance.DatabaseInstance; import java.io.IOException; import java.util.function.Function; +import org.jooq.DSLContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,12 +31,12 @@ public class JobsDatabaseInstance extends BaseDatabaseInstance implements Databa }; @VisibleForTesting - public JobsDatabaseInstance(final String username, final String password, final String connectionString, final String schema) { - super(username, password, connectionString, schema, DATABASE_LOGGING_NAME, JobsDatabaseSchema.getTableNames(), IS_JOBS_DATABASE_READY); + public JobsDatabaseInstance(final DSLContext dslContext, final String schema) { + super(dslContext, DATABASE_LOGGING_NAME, schema, JobsDatabaseSchema.getTableNames(), IS_JOBS_DATABASE_READY); } - public JobsDatabaseInstance(final String username, final String password, final String connectionString) throws IOException { - this(username, password, connectionString, MoreResources.readResource(SCHEMA_PATH)); + public JobsDatabaseInstance(final DSLContext dslContext) throws IOException { + this(dslContext, MoreResources.readResource(SCHEMA_PATH)); } } diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrationDevCenter.java 
b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrationDevCenter.java
index 733385f6b9768..e73e0890685fa 100644
--- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrationDevCenter.java
+++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrationDevCenter.java
@@ -5,10 +5,13 @@
 package io.airbyte.db.instance.jobs;
 
 import io.airbyte.db.Database;
+import io.airbyte.db.factory.FlywayFactory;
 import io.airbyte.db.instance.FlywayDatabaseMigrator;
 import io.airbyte.db.instance.development.MigrationDevCenter;
 import java.io.IOException;
-import org.testcontainers.containers.PostgreSQLContainer;
+import javax.sql.DataSource;
+import org.flywaydb.core.Flyway;
+import org.jooq.DSLContext;
 
 /**
  * Helper class for migration development. See README for details.
@@ -20,13 +23,19 @@ public JobsDatabaseMigrationDevCenter() {
   }
 
   @Override
-  protected FlywayDatabaseMigrator getMigrator(final Database database) {
-    return new JobsDatabaseMigrator(database, JobsDatabaseMigrationDevCenter.class.getSimpleName());
+  protected FlywayDatabaseMigrator getMigrator(final Database database, final Flyway flyway) {
+    return new JobsDatabaseMigrator(database, flyway);
   }
 
   @Override
-  protected Database getDatabase(final PostgreSQLContainer<?> container) throws IOException {
-    return new JobsDatabaseInstance(container.getUsername(), container.getPassword(), container.getJdbcUrl()).getAndInitialize();
+  protected Database getDatabase(final DSLContext dslContext) throws IOException {
+    return new JobsDatabaseInstance(dslContext).getAndInitialize();
+  }
+
+  @Override
+  protected Flyway getFlyway(final DataSource dataSource) {
+    return FlywayFactory.create(dataSource, getClass().getSimpleName(), JobsDatabaseMigrator.DB_IDENTIFIER,
+        JobsDatabaseMigrator.MIGRATION_FILE_LOCATION);
+  }
 
 }
diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrator.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrator.java
index 5bfb5aca52464..3d3f9dc010fa7 100644
--- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrator.java
+++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseMigrator.java
@@ -6,14 +6,15 @@
 
 import io.airbyte.db.Database;
 import io.airbyte.db.instance.FlywayDatabaseMigrator;
+import org.flywaydb.core.Flyway;
 
 public class JobsDatabaseMigrator extends FlywayDatabaseMigrator {
 
   public static final String DB_IDENTIFIER = "jobs";
   public static final String MIGRATION_FILE_LOCATION = "classpath:io/airbyte/db/instance/jobs/migrations";
 
-  public JobsDatabaseMigrator(final Database database, final String migrationRunner) {
-    super(database, DB_IDENTIFIER, migrationRunner, MIGRATION_FILE_LOCATION);
+  public JobsDatabaseMigrator(final Database database, final Flyway flyway) {
+    super(database, flyway);
   }
 
 }
diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseTestProvider.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseTestProvider.java
index 133b9c2908073..850a020309aa9 100644
--- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseTestProvider.java
+++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsDatabaseTestProvider.java
@@ -8,28 +8,27 @@
 import io.airbyte.db.instance.DatabaseMigrator;
 import io.airbyte.db.instance.test.TestDatabaseProvider;
 import java.io.IOException;
+import org.flywaydb.core.Flyway;
+import 
org.jooq.DSLContext; public class JobsDatabaseTestProvider implements TestDatabaseProvider { - private final String user; - private final String password; - private final String jdbcUrl; + private final DSLContext dslContext; + private final Flyway flyway; - public JobsDatabaseTestProvider(String user, String password, String jdbcUrl) { - this.user = user; - this.password = password; - this.jdbcUrl = jdbcUrl; + public JobsDatabaseTestProvider(final DSLContext dslContext, final Flyway flyway) { + this.dslContext = dslContext; + this.flyway = flyway; } @Override public Database create(final boolean runMigration) throws IOException { - final Database jobsDatabase = new JobsDatabaseInstance(user, password, jdbcUrl) + final Database jobsDatabase = new JobsDatabaseInstance(dslContext) .getAndInitialize(); if (runMigration) { final DatabaseMigrator migrator = new JobsDatabaseMigrator( - jobsDatabase, - JobsDatabaseTestProvider.class.getSimpleName()); + jobsDatabase, flyway); migrator.createBaseline(); migrator.migrate(); } diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsFlywayMigrationDatabase.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsFlywayMigrationDatabase.java index 6bcb85e0528b8..491fa3dc476b1 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsFlywayMigrationDatabase.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/JobsFlywayMigrationDatabase.java @@ -8,6 +8,8 @@ import io.airbyte.db.instance.DatabaseMigrator; import io.airbyte.db.instance.FlywayMigrationDatabase; import java.io.IOException; +import org.flywaydb.core.Flyway; +import org.jooq.DSLContext; /** * Jobs database for jOOQ code generation. @@ -15,13 +17,28 @@ public class JobsFlywayMigrationDatabase extends FlywayMigrationDatabase { @Override - protected Database getAndInitializeDatabase(final String username, final String password, final String connectionString) throws IOException { - return new JobsDatabaseInstance(username, password, connectionString).getAndInitialize(); + protected Database getAndInitializeDatabase(final DSLContext dslContext) throws IOException { + return new JobsDatabaseInstance(dslContext).getAndInitialize(); } @Override - protected DatabaseMigrator getDatabaseMigrator(final Database database) { - return new JobsDatabaseMigrator(database, JobsFlywayMigrationDatabase.class.getSimpleName()); + protected DatabaseMigrator getDatabaseMigrator(final Database database, final Flyway flyway) { + return new JobsDatabaseMigrator(database, flyway); + } + + @Override + protected String getInstalledBy() { + return JobsFlywayMigrationDatabase.class.getSimpleName(); + } + + @Override + protected String getDbIdentifier() { + return JobsDatabaseMigrator.DB_IDENTIFIER; + } + + @Override + protected String[] getMigrationFileLocations() { + return new String[] {JobsDatabaseMigrator.MIGRATION_FILE_LOCATION}; } } diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/test/TestDatabaseProviders.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/test/TestDatabaseProviders.java index b15cc81db76a2..04681147bda6d 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/test/TestDatabaseProviders.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/test/TestDatabaseProviders.java @@ -4,14 +4,16 @@ package io.airbyte.db.instance.test; -import com.google.api.client.util.Preconditions; -import io.airbyte.config.Configs; import io.airbyte.db.Database; +import io.airbyte.db.factory.FlywayFactory; +import 
io.airbyte.db.instance.configs.ConfigsDatabaseMigrator;
 import io.airbyte.db.instance.configs.ConfigsDatabaseTestProvider;
+import io.airbyte.db.instance.jobs.JobsDatabaseMigrator;
 import io.airbyte.db.instance.jobs.JobsDatabaseTestProvider;
 import java.io.IOException;
-import java.util.Optional;
-import org.testcontainers.containers.PostgreSQLContainer;
+import javax.sql.DataSource;
+import org.flywaydb.core.Flyway;
+import org.jooq.DSLContext;
 
 /**
  * Use this class to create mock databases in unit tests. This class takes care of database
@@ -20,13 +22,13 @@
 @SuppressWarnings("OptionalUsedAsFieldOrParameterType")
 public class TestDatabaseProviders {
 
-  private final Optional<Configs> configs;
-  private final Optional<PostgreSQLContainer<?>> container;
+  private final DataSource dataSource;
+  private final DSLContext dslContext;
   private boolean runMigration = true;
 
-  public TestDatabaseProviders(final PostgreSQLContainer<?> container) {
-    this.configs = Optional.empty();
-    this.container = Optional.of(container);
+  public TestDatabaseProviders(final DataSource dataSource, final DSLContext dslContext) {
+    this.dataSource = dataSource;
+    this.dslContext = dslContext;
   }
 
   /**
@@ -39,41 +41,17 @@ public TestDatabaseProviders turnOffMigration() {
   }
 
   public Database createNewConfigsDatabase() throws IOException {
-    Preconditions.checkArgument(configs.isPresent() || container.isPresent());
-    if (configs.isPresent()) {
-      final Configs c = configs.get();
-      return new ConfigsDatabaseTestProvider(
-          c.getConfigDatabaseUser(),
-          c.getConfigDatabasePassword(),
-          c.getConfigDatabaseUrl())
-              .create(runMigration);
-    } else {
-      final PostgreSQLContainer<?> c = container.get();
-      return new ConfigsDatabaseTestProvider(
-          c.getUsername(),
-          c.getPassword(),
-          c.getJdbcUrl())
-              .create(runMigration);
-    }
+    final Flyway flyway = FlywayFactory.create(dataSource, ConfigsDatabaseTestProvider.class.getSimpleName(), ConfigsDatabaseMigrator.DB_IDENTIFIER,
+        ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION);
+    return new ConfigsDatabaseTestProvider(dslContext, flyway)
+        .create(runMigration);
   }
 
   public Database createNewJobsDatabase() throws IOException {
-    Preconditions.checkArgument(configs.isPresent() || container.isPresent());
-    if (configs.isPresent()) {
-      final Configs c = configs.get();
-      return new JobsDatabaseTestProvider(
-          c.getDatabaseUser(),
-          c.getDatabasePassword(),
-          c.getDatabaseUrl())
-              .create(runMigration);
-    } else {
-      final PostgreSQLContainer<?> c = container.get();
-      return new JobsDatabaseTestProvider(
-          c.getUsername(),
-          c.getPassword(),
-          c.getJdbcUrl())
-              .create(runMigration);
-    }
+    final Flyway flyway = FlywayFactory.create(dataSource, JobsDatabaseTestProvider.class.getSimpleName(), JobsDatabaseMigrator.DB_IDENTIFIER,
+        JobsDatabaseMigrator.MIGRATION_FILE_LOCATION);
+    return new JobsDatabaseTestProvider(dslContext, flyway)
+        .create(runMigration);
   }
 
 }
diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/PostgresUtilsTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/PostgresUtilsTest.java
index a32262bd0d2d8..e5d4c1dac7fbe 100644
--- a/airbyte-db/lib/src/test/java/io/airbyte/db/PostgresUtilsTest.java
+++ b/airbyte-db/lib/src/test/java/io/airbyte/db/PostgresUtilsTest.java
@@ -12,11 +12,13 @@
 import io.airbyte.commons.io.IOs;
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.commons.string.Strings;
+import io.airbyte.db.factory.DataSourceFactory;
+import io.airbyte.db.factory.DatabaseDriver;
 import io.airbyte.db.jdbc.DefaultJdbcDatabase;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import 
io.airbyte.test.utils.PostgreSQLContainerHelper; import java.sql.SQLException; -import org.apache.commons.dbcp2.BasicDataSource; +import javax.sql.DataSource; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -27,7 +29,7 @@ class PostgresUtilsTest { private static PostgreSQLContainer PSQL_DB; - private BasicDataSource dataSource; + private DataSource dataSource; @BeforeAll static void init() { @@ -46,14 +48,14 @@ void setup() throws Exception { final String tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + dbName + ";"); PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB); - dataSource = new BasicDataSource(); - dataSource.setDriverClassName("org.postgresql.Driver"); - dataSource.setUsername(config.get("username").asText()); - dataSource.setPassword(config.get("password").asText()); - dataSource.setUrl(String.format("jdbc:postgresql://%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText())); + dataSource = DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("database").asText())); final JdbcDatabase defaultJdbcDatabase = new DefaultJdbcDatabase(dataSource); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/factory/DSLContextFactoryTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/factory/DSLContextFactoryTest.java index b4bae85c24f92..c447c2b3ca3ba 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/factory/DSLContextFactoryTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/factory/DSLContextFactoryTest.java @@ -7,11 +7,11 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; +import java.util.Map; import javax.sql.DataSource; import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.junit.jupiter.api.Test; -import org.postgresql.Driver; /** * Test suite for the {@link DSLContextFactory} class. 
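 * Covers creating a DSLContext from an existing DataSource as well as directly from credentials.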
@@ -21,11 +21,39 @@ public class DSLContextFactoryTest extends AbstractFactoryTest { @Test void testCreatingADslContext() { final DataSource dataSource = - DataSourceFactory.create(container.getUsername(), container.getPassword(), Driver.class.getName(), container.getJdbcUrl()); + DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); final SQLDialect dialect = SQLDialect.POSTGRES; final DSLContext dslContext = DSLContextFactory.create(dataSource, dialect); assertNotNull(dslContext); assertEquals(dialect, dslContext.configuration().dialect()); } + @Test + void testCreatingADslContextWithIndividualConfiguration() { + final SQLDialect dialect = SQLDialect.POSTGRES; + final DSLContext dslContext = DSLContextFactory.create( + container.getUsername(), + container.getPassword(), + container.getDriverClassName(), + container.getJdbcUrl(), + dialect); + assertNotNull(dslContext); + assertEquals(dialect, dslContext.configuration().dialect()); + } + + @Test + void testCreatingADslContextWithIndividualConfigurationAndConnectionProperties() { + final Map connectionProperties = Map.of("foo", "bar"); + final SQLDialect dialect = SQLDialect.POSTGRES; + final DSLContext dslContext = DSLContextFactory.create( + container.getUsername(), + container.getPassword(), + container.getDriverClassName(), + container.getJdbcUrl(), + dialect, + connectionProperties); + assertNotNull(dslContext); + assertEquals(dialect, dslContext.configuration().dialect()); + } + } diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/factory/DataSourceFactoryTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/factory/DataSourceFactoryTest.java index 4cfe7cc141245..15fd845934ec3 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/factory/DataSourceFactoryTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/factory/DataSourceFactoryTest.java @@ -12,7 +12,6 @@ import java.util.Map; import javax.sql.DataSource; import org.junit.jupiter.api.Test; -import org.postgresql.Driver; /** * Test suite for the {@link DataSourceFactory} class. 
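 * Each overload is expected to yield a HikariDataSource configured with the default pool size of 5.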
@@ -23,32 +22,34 @@ public class DataSourceFactoryTest extends AbstractFactoryTest { void testCreatingADataSourceWithJdbcUrl() { final String username = container.getUsername(); final String password = container.getPassword(); - final String driverClassName = Driver.class.getName(); + final String driverClassName = container.getDriverClassName(); final String jdbcUrl = container.getJdbcUrl(); final DataSource dataSource = DataSourceFactory.create(username, password, driverClassName, jdbcUrl); assertNotNull(dataSource); assertEquals(HikariDataSource.class, dataSource.getClass()); + assertEquals(5, ((HikariDataSource) dataSource).getHikariConfigMXBean().getMaximumPoolSize()); } @Test void testCreatingADataSourceWithJdbcUrlAndConnectionProperties() { final String username = container.getUsername(); final String password = container.getPassword(); - final String driverClassName = Driver.class.getName(); + final String driverClassName = container.getDriverClassName(); final String jdbcUrl = container.getJdbcUrl(); final Map connectionProperties = Map.of("foo", "bar"); final DataSource dataSource = DataSourceFactory.create(username, password, driverClassName, jdbcUrl, connectionProperties); assertNotNull(dataSource); assertEquals(HikariDataSource.class, dataSource.getClass()); + assertEquals(5, ((HikariDataSource) dataSource).getHikariConfigMXBean().getMaximumPoolSize()); } @Test void testCreatingADataSourceWithHostAndPort() { final String username = container.getUsername(); final String password = container.getPassword(); - final String driverClassName = Driver.class.getName(); + final String driverClassName = container.getDriverClassName(); final String host = container.getHost(); final Integer port = container.getFirstMappedPort(); final String database = container.getDatabaseName(); @@ -56,13 +57,14 @@ void testCreatingADataSourceWithHostAndPort() { final DataSource dataSource = DataSourceFactory.create(username, password, host, port, database, driverClassName); assertNotNull(dataSource); assertEquals(HikariDataSource.class, dataSource.getClass()); + assertEquals(5, ((HikariDataSource) dataSource).getHikariConfigMXBean().getMaximumPoolSize()); } @Test void testCreatingADataSourceWithHostPortAndConnectionProperties() { final String username = container.getUsername(); final String password = container.getPassword(); - final String driverClassName = Driver.class.getName(); + final String driverClassName = container.getDriverClassName(); final String host = container.getHost(); final Integer port = container.getFirstMappedPort(); final String database = container.getDatabaseName(); @@ -71,6 +73,7 @@ void testCreatingADataSourceWithHostPortAndConnectionProperties() { final DataSource dataSource = DataSourceFactory.create(username, password, host, port, database, driverClassName, connectionProperties); assertNotNull(dataSource); assertEquals(HikariDataSource.class, dataSource.getClass()); + assertEquals(5, ((HikariDataSource) dataSource).getHikariConfigMXBean().getMaximumPoolSize()); } @Test @@ -98,6 +101,7 @@ void testCreatingAPostgresqlDataSource() { final DataSource dataSource = DataSourceFactory.createPostgres(username, password, host, port, database); assertNotNull(dataSource); assertEquals(HikariDataSource.class, dataSource.getClass()); + assertEquals(5, ((HikariDataSource) dataSource).getHikariConfigMXBean().getMaximumPoolSize()); } } diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/factory/FlywayFactoryTest.java 
b/airbyte-db/lib/src/test/java/io/airbyte/db/factory/FlywayFactoryTest.java index 2c2913261b283..8782605dbb0a5 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/factory/FlywayFactoryTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/factory/FlywayFactoryTest.java @@ -8,10 +8,10 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import io.airbyte.test.utils.DatabaseConnectionHelper; import javax.sql.DataSource; import org.flywaydb.core.Flyway; import org.junit.jupiter.api.Test; -import org.postgresql.Driver; /** * Test suite for the {@link FlywayFactory} class. @@ -22,15 +22,35 @@ public class FlywayFactoryTest extends AbstractFactoryTest { void testCreatingAFlywayInstance() { final String installedBy = "test"; final String dbIdentifier = "test"; + final String baselineVersion = "1.2.3"; + final String baselineDescription = "A test baseline description"; + final boolean baselineOnMigrate = true; final String migrationFileLocation = "classpath:io/airbyte/db/instance/toys/migrations"; - final DataSource dataSource = - DataSourceFactory.create(container.getUsername(), container.getPassword(), Driver.class.getName(), container.getJdbcUrl()); + final DataSource dataSource = DatabaseConnectionHelper.createDataSource(container); + final Flyway flyway = + FlywayFactory.create(dataSource, installedBy, dbIdentifier, baselineVersion, baselineDescription, baselineOnMigrate, migrationFileLocation); + assertNotNull(flyway); + assertTrue(flyway.getConfiguration().isBaselineOnMigrate()); + assertEquals(baselineDescription, flyway.getConfiguration().getBaselineDescription()); + assertEquals(baselineVersion, flyway.getConfiguration().getBaselineVersion().getVersion()); + assertEquals(baselineOnMigrate, flyway.getConfiguration().isBaselineOnMigrate()); + assertEquals(installedBy, flyway.getConfiguration().getInstalledBy()); + assertEquals(String.format(FlywayFactory.MIGRATION_TABLE_FORMAT, dbIdentifier), flyway.getConfiguration().getTable()); + assertEquals(migrationFileLocation, flyway.getConfiguration().getLocations()[0].getDescriptor()); + } + @Test + void testCreatingAFlywayInstanceWithDefaults() { + final String installedBy = "test"; + final String dbIdentifier = "test"; + final String migrationFileLocation = "classpath:io/airbyte/db/instance/toys/migrations"; + final DataSource dataSource = DatabaseConnectionHelper.createDataSource(container); final Flyway flyway = FlywayFactory.create(dataSource, installedBy, dbIdentifier, migrationFileLocation); assertNotNull(flyway); assertTrue(flyway.getConfiguration().isBaselineOnMigrate()); assertEquals(FlywayFactory.BASELINE_DESCRIPTION, flyway.getConfiguration().getBaselineDescription()); assertEquals(FlywayFactory.BASELINE_VERSION, flyway.getConfiguration().getBaselineVersion().getVersion()); + assertEquals(FlywayFactory.BASELINE_ON_MIGRATION, flyway.getConfiguration().isBaselineOnMigrate()); assertEquals(installedBy, flyway.getConfiguration().getInstalledBy()); assertEquals(String.format(FlywayFactory.MIGRATION_TABLE_FORMAT, dbIdentifier), flyway.getConfiguration().getTable()); assertEquals(migrationFileLocation, flyway.getConfiguration().getLocations()[0].getDescriptor()); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/AbstractDatabaseTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/AbstractDatabaseTest.java index de040b91c3449..5fbb5723b89a9 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/AbstractDatabaseTest.java +++ 
b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/AbstractDatabaseTest.java @@ -5,7 +5,13 @@ package io.airbyte.db.instance; import io.airbyte.db.Database; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.test.utils.DatabaseConnectionHelper; +import java.io.Closeable; import java.io.IOException; +import javax.sql.DataSource; +import org.jooq.DSLContext; +import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -31,22 +37,41 @@ public static void dbDown() { } protected Database database; + protected DataSource dataSource; + protected DSLContext dslContext; @BeforeEach - public void setup() throws Exception { - database = getDatabase(); + public void setup() throws IOException { + dataSource = DatabaseConnectionHelper.createDataSource(container); + dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); + database = getDatabase(dataSource, dslContext); } @AfterEach - void tearDown() throws Exception { - database.close(); + void tearDown() throws IOException { + dslContext.close(); + if (dataSource instanceof Closeable closeable) { + closeable.close(); + } } /** - * Create an initialized database. The downstream implementation should do it by calling + * Create an initialized {@link Database}. The downstream implementation should do it by calling * {@link DatabaseInstance#getAndInitialize} or {@link DatabaseInstance#getInitialized}, and * {@link DatabaseMigrator#migrate} if necessary. + * + * @param dataSource The {@link DataSource} used to access the database. + * @param dslContext The {@link DSLContext} used to execute queries. + * @return an initialized {@link Database} instance. */ - public abstract Database getDatabase() throws IOException; + public abstract Database getDatabase(DataSource dataSource, DSLContext dslContext) throws IOException; + + public DataSource getDataSource() { + return dataSource; + } + + public DSLContext getDslContext() { + return dslContext; + } } diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/BaseDatabaseInstanceTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/BaseDatabaseInstanceTest.java index 58984362acfb6..ba7e82378a59f 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/BaseDatabaseInstanceTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/BaseDatabaseInstanceTest.java @@ -8,7 +8,12 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.test.utils.DatabaseConnectionHelper; +import java.io.Closeable; +import javax.sql.DataSource; +import org.jooq.DSLContext; +import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -41,18 +46,23 @@ public static void dbDown() { } private Database database; + private DataSource dataSource; + private DSLContext dslContext; @BeforeEach void createDatabase() { - database = Databases.createPostgresDatabaseWithRetry( - container.getUsername(), container.getPassword(), container.getJdbcUrl(), - BaseDatabaseInstance.isDatabaseConnected(DATABASE_NAME)); + dataSource = DatabaseConnectionHelper.createDataSource(container); + dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); + database = Database.createWithRetry(dslContext, BaseDatabaseInstance.isDatabaseConnected(DATABASE_NAME)); } @AfterEach 
void tearDown() throws Exception { database.transaction(ctx -> ctx.execute(String.format("DROP TABLE IF EXISTS %s;", TABLE_NAME))); - database.close(); + dslContext.close(); + if (dataSource instanceof Closeable closeable) { + closeable.close(); + } } @Test diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/AbstractConfigsDatabaseTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/AbstractConfigsDatabaseTest.java index c786918ae91a0..bcbd5c0c74462 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/AbstractConfigsDatabaseTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/AbstractConfigsDatabaseTest.java @@ -8,12 +8,14 @@ import io.airbyte.db.instance.AbstractDatabaseTest; import io.airbyte.db.instance.test.TestDatabaseProviders; import java.io.IOException; +import javax.sql.DataSource; +import org.jooq.DSLContext; public abstract class AbstractConfigsDatabaseTest extends AbstractDatabaseTest { @Override - public Database getDatabase() throws IOException { - return new TestDatabaseProviders(container).turnOffMigration().createNewConfigsDatabase(); + public Database getDatabase(final DataSource dataSource, final DSLContext dslContext) throws IOException { + return new TestDatabaseProviders(dataSource, dslContext).turnOffMigration().createNewConfigsDatabase(); } } diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigratorTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigratorTest.java index aa7ab377eecad..219bbba1830d6 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigratorTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/ConfigsDatabaseMigratorTest.java @@ -4,9 +4,11 @@ package io.airbyte.db.instance.configs; +import io.airbyte.db.factory.FlywayFactory; import io.airbyte.db.instance.DatabaseMigrator; import io.airbyte.db.instance.development.MigrationDevHelper; import java.io.IOException; +import org.flywaydb.core.Flyway; import org.junit.jupiter.api.Test; public class ConfigsDatabaseMigratorTest extends AbstractConfigsDatabaseTest { @@ -15,7 +17,9 @@ public class ConfigsDatabaseMigratorTest extends AbstractConfigsDatabaseTest { @Test public void dumpSchema() throws IOException { - final DatabaseMigrator migrator = new ConfigsDatabaseMigrator(database, ConfigsDatabaseMigratorTest.class.getSimpleName()); + final Flyway flyway = FlywayFactory.create(getDataSource(), getClass().getSimpleName(), ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); + final DatabaseMigrator migrator = new ConfigsDatabaseMigrator(database, flyway); migrator.migrate(); final String schema = migrator.dumpSchema(); MigrationDevHelper.dumpSchema(schema, SCHEMA_DUMP_FILE, false); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state_test.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state_test.java index c864462a7fe3f..b958954d83383 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state_test.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state_test.java @@ -47,7 +47,7 @@ import org.jooq.JSONB; import org.jooq.Table; import org.jooq.impl.SQLDataType; -import org.junit.jupiter.api.BeforeAll; +import 
org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; @@ -84,17 +84,13 @@ class V0_30_22_001__Store_last_sync_state_test extends AbstractConfigsDatabaseTe private static final StandardSyncState STD_CONNECTION_STATE_3 = getStandardSyncState(CONNECTION_3_ID, CONNECTION_3_STATE); private static final Set STD_CONNECTION_STATES = Set.of(STD_CONNECTION_STATE_2, STD_CONNECTION_STATE_3); - private static Database jobDatabase; + private Database jobDatabase; - @BeforeAll + @BeforeEach @Timeout(value = 2, unit = TimeUnit.MINUTES) - public static void setupJobDatabase() throws Exception { - jobDatabase = new JobsDatabaseInstance( - container.getUsername(), - container.getPassword(), - container.getJdbcUrl()) - .getAndInitialize(); + public void setupJobDatabase() throws Exception { + jobDatabase = new JobsDatabaseInstance(dslContext).getAndInitialize(); } @Test @@ -166,7 +162,7 @@ public void testCopyData() throws SQLException { */ final OffsetDateTime timestamp = timestampWithFullPrecision.withNano(1000 * (timestampWithFullPrecision.getNano() / 1000)); - database.query(ctx -> { + jobDatabase.query(ctx -> { V0_30_22_001__Store_last_sync_state.copyData(ctx, STD_CONNECTION_STATES, timestamp); checkSyncStates(ctx, STD_CONNECTION_STATES, timestamp); @@ -185,7 +181,7 @@ public void testCopyData() throws SQLException { @Test @Order(40) public void testMigration() throws Exception { - database.query(ctx -> ctx.deleteFrom(TABLE_AIRBYTE_CONFIGS) + jobDatabase.query(ctx -> ctx.deleteFrom(TABLE_AIRBYTE_CONFIGS) .where(COLUMN_CONFIG_TYPE.eq(ConfigSchema.STANDARD_SYNC_STATE.name())) .execute()); @@ -201,7 +197,7 @@ public Configuration getConfiguration() { @Override public Connection getConnection() { try { - return database.getDataSource().getConnection(); + return dataSource.getConnection(); } catch (final SQLException e) { throw new RuntimeException(e); } @@ -209,7 +205,7 @@ public Connection getConnection() { }; migration.migrate(context); - database.query(ctx -> { + jobDatabase.query(ctx -> { checkSyncStates(ctx, STD_CONNECTION_STATES, null); return null; }); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization_Test.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization_Test.java index d2020d279b192..289085ed9c5e3 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization_Test.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization_Test.java @@ -4,8 +4,13 @@ package io.airbyte.db.instance.configs.migrations; -import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.*; +import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.destinationConnections; import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.destinationOauthParameters; +import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.now; +import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.sourceConnections; +import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.sourceOauthParameters; +import static 
io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.standardDestinationDefinitions; +import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.standardSourceDefinitions; import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.standardSyncOperations; import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.standardSyncStates; import static io.airbyte.db.instance.configs.migrations.SetupForNormalizedTablesTest.standardSyncs; @@ -31,7 +36,6 @@ import io.airbyte.config.StandardSyncState; import io.airbyte.config.StandardWorkspace; import io.airbyte.config.State; -import io.airbyte.db.Database; import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.NamespaceDefinitionType; @@ -60,8 +64,7 @@ public class V0_32_8_001__AirbyteConfigDatabaseDenormalization_Test extends Abst @Test public void testCompleteMigration() throws IOException, SQLException { - final Database database = getDatabase(); - final DSLContext context = DSL.using(database.getDataSource().getConnection()); + final DSLContext context = getDslContext(); SetupForNormalizedTablesTest.setup(context); V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); @@ -79,7 +82,7 @@ public void testCompleteMigration() throws IOException, SQLException { } private void assertDataForWorkspace(final DSLContext context) { - Result workspaces = context.select(asterisk()) + final Result workspaces = context.select(asterisk()) .from(table("workspace")) .fetch(); Assertions.assertEquals(1, workspaces.size()); @@ -105,7 +108,7 @@ private void assertDataForWorkspace(final DSLContext context) { final List notificationList = new ArrayList<>(); final List fetchedNotifications = Jsons.deserialize(workspace.get(notifications).data(), List.class); - for (Object notification : fetchedNotifications) { + for (final Object notification : fetchedNotifications) { notificationList.add(Jsons.convertValue(notification, Notification.class)); } final StandardWorkspace workspaceFromNewTable = new StandardWorkspace() @@ -428,7 +431,7 @@ private List connectionOperationIds(final UUID connectionIdTo, final DSLCo final List ids = new ArrayList<>(); - for (Record record : connectionOperations) { + for (final Record record : connectionOperations) { ids.add(record.get(operationId)); Assertions.assertNotNull(record.get(id)); Assertions.assertEquals(now(), record.get(createdAt).toInstant()); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_14_001__AddTombstoneToActorDefinitionTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_14_001__AddTombstoneToActorDefinitionTest.java index cd86e051c4246..fc715959b6df2 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_14_001__AddTombstoneToActorDefinitionTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_14_001__AddTombstoneToActorDefinitionTest.java @@ -4,7 +4,6 @@ package io.airbyte.db.instance.configs.migrations; -import io.airbyte.db.Database; import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; import java.io.IOException; @@ -21,9 
+20,7 @@ public class V0_35_14_001__AddTombstoneToActorDefinitionTest extends AbstractCon @Test public void test() throws SQLException, IOException { - - final Database database = getDatabase(); - final DSLContext context = DSL.using(database.getDataSource().getConnection()); + final DSLContext context = getDslContext(); // necessary to add actor_definition table V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition_Test.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition_Test.java index ad9ee714234e7..98b18daad0659 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition_Test.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition_Test.java @@ -4,7 +4,6 @@ package io.airbyte.db.instance.configs.migrations; -import io.airbyte.db.Database; import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; import io.airbyte.db.instance.configs.migrations.V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition.ReleaseStage; @@ -21,9 +20,7 @@ public class V0_35_15_001__AddReleaseStageAndReleaseDateToActorDefinition_Test e @Test public void test() throws SQLException, IOException { - - final Database database = getDatabase(); - final DSLContext context = DSL.using(database.getDataSource().getConnection()); + final DSLContext context = getDslContext(); // necessary to add actor_definition table V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_1_001__RemoveForeignKeyFromActorOauth_Test.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_1_001__RemoveForeignKeyFromActorOauth_Test.java index 99fa6944a95da..6da56e5f42801 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_1_001__RemoveForeignKeyFromActorOauth_Test.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_1_001__RemoveForeignKeyFromActorOauth_Test.java @@ -13,7 +13,6 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.DestinationOAuthParameter; import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.db.Database; import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; import java.io.IOException; @@ -35,8 +34,7 @@ public class V0_35_1_001__RemoveForeignKeyFromActorOauth_Test extends AbstractCo @Test public void testCompleteMigration() throws IOException, SQLException { - final Database database = getDatabase(); - final DSLContext context = DSL.using(database.getDataSource().getConnection()); + final DSLContext context = getDslContext(); SetupForNormalizedTablesTest.setup(context); V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_26_001__PersistDiscoveredCatalogTest.java 
b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_26_001__PersistDiscoveredCatalogTest.java index d7d0b5c2ba1c3..2ac67c52df655 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_26_001__PersistDiscoveredCatalogTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_26_001__PersistDiscoveredCatalogTest.java @@ -4,7 +4,6 @@ package io.airbyte.db.instance.configs.migrations; -import io.airbyte.db.Database; import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; import java.io.IOException; @@ -21,9 +20,7 @@ class V0_35_26_001__PersistDiscoveredCatalogTest extends AbstractConfigsDatabase @Test public void test() throws SQLException, IOException { - - final Database database = getDatabase(); - final DSLContext context = DSL.using(database.getDataSource().getConnection()); + final DSLContext context = getDslContext(); V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); V0_35_26_001__PersistDiscoveredCatalog.migrate(context); assertCanInsertData(context); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_28_001__AddActorCatalogMetadataColumnsTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_28_001__AddActorCatalogMetadataColumnsTest.java index 7038326b98d4f..46ed1d9c97ab1 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_28_001__AddActorCatalogMetadataColumnsTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_28_001__AddActorCatalogMetadataColumnsTest.java @@ -4,7 +4,6 @@ package io.airbyte.db.instance.configs.migrations; -import io.airbyte.db.Database; import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; import java.io.IOException; @@ -21,9 +20,7 @@ public class V0_35_28_001__AddActorCatalogMetadataColumnsTest extends AbstractCo @Test public void test() throws SQLException, IOException { - - final Database database = getDatabase(); - final DSLContext context = DSL.using(database.getDataSource().getConnection()); + final DSLContext context = getDslContext(); V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); V0_35_26_001__PersistDiscoveredCatalog.migrate(context); V0_35_28_001__AddActorCatalogMetadataColumns.migrate(context); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_3_001__DropAirbyteConfigsTableTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_3_001__DropAirbyteConfigsTableTest.java index c2087655df6f0..62db5e8f26ec5 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_3_001__DropAirbyteConfigsTableTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_3_001__DropAirbyteConfigsTableTest.java @@ -8,7 +8,6 @@ import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; -import io.airbyte.db.Database; import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; import java.io.IOException; import java.sql.SQLException; @@ -20,8 +19,7 @@ public class V0_35_3_001__DropAirbyteConfigsTableTest extends AbstractConfigsDat @Test public void test() 
throws IOException, SQLException { - final Database database = getDatabase(); - final DSLContext context = DSL.using(database.getDataSource().getConnection()); + final DSLContext context = getDslContext(); assertTrue(airbyteConfigsExists(context)); V0_35_3_001__DropAirbyteConfigsTable.dropTable(context); assertFalse(airbyteConfigsExists(context)); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_001__AddPublicToActorDefinitionTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_001__AddPublicToActorDefinitionTest.java index d2e7f080d72ea..b70c2b5ea80f5 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_001__AddPublicToActorDefinitionTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_001__AddPublicToActorDefinitionTest.java @@ -4,7 +4,6 @@ package io.airbyte.db.instance.configs.migrations; -import io.airbyte.db.Database; import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; import java.io.IOException; @@ -21,9 +20,7 @@ public class V0_35_59_001__AddPublicToActorDefinitionTest extends AbstractConfig @Test public void test() throws SQLException, IOException { - - final Database database = getDatabase(); - final DSLContext context = DSL.using(database.getDataSource().getConnection()); + final DSLContext context = getDslContext(); // necessary to add actor_definition table V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_002__AddActorDefinitionWorkspaceGrantTableTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_002__AddActorDefinitionWorkspaceGrantTableTest.java index bce4bd541ffd2..b24cbc0a332a3 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_002__AddActorDefinitionWorkspaceGrantTableTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_002__AddActorDefinitionWorkspaceGrantTableTest.java @@ -4,7 +4,6 @@ package io.airbyte.db.instance.configs.migrations; -import io.airbyte.db.Database; import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; import java.io.IOException; @@ -21,9 +20,7 @@ public class V0_35_59_002__AddActorDefinitionWorkspaceGrantTableTest extends Abs @Test public void test() throws SQLException, IOException { - - final Database database = getDatabase(); - final DSLContext context = DSL.using(database.getDataSource().getConnection()); + final DSLContext context = getDslContext(); V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); final UUID actorDefinitionId = new UUID(0L, 1L); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_003__AddCustomToActorDefinitionTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_003__AddCustomToActorDefinitionTest.java index 1c0a2862ca048..4c445e92d1a1e 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_003__AddCustomToActorDefinitionTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_35_59_003__AddCustomToActorDefinitionTest.java 
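Taken together, the migration-test hunks above and below make one recurring change: each test no longer opens its own JDBC connection via `DSL.using(database.getDataSource().getConnection())`, but reads the `DSLContext` created per test by the shared base class. A condensed, illustrative sketch of the resulting test shape; `AbstractConfigsDatabaseTest`, `getDslContext()`, and the migration class are real names from the diffs in this patch, while the test class itself is hypothetical and not part of the patch:

```java
import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest;
import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization;
import java.io.IOException;
import java.sql.SQLException;
import org.jooq.DSLContext;
import org.junit.jupiter.api.Test;

// Hypothetical example test showing the pattern these diffs converge on.
class ExampleMigrationTest extends AbstractConfigsDatabaseTest {

  @Test
  public void test() throws SQLException, IOException {
    // AbstractDatabaseTest#setup builds the DataSource and DSLContext before
    // each test and tears them down afterwards, so the test only reads them.
    final DSLContext context = getDslContext();
    V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context);
    // ...assertions against the migrated schema go here...
  }
}
```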
@@ -4,7 +4,6 @@ package io.airbyte.db.instance.configs.migrations; -import io.airbyte.db.Database; import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; import java.io.IOException; @@ -21,9 +20,7 @@ class V0_35_59_003__AddCustomToActorDefinitionTest extends AbstractConfigsDataba @Test public void test() throws SQLException, IOException { - - final Database database = getDatabase(); - final DSLContext context = DSL.using(database.getDataSource().getConnection()); + final DSLContext context = getDslContext(); // necessary to add actor_definition table V0_32_8_001__AirbyteConfigDatabaseDenormalization.migrate(context); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/AbstractJobsDatabaseTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/AbstractJobsDatabaseTest.java index b14fc53b8a638..fae7759a3d708 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/AbstractJobsDatabaseTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/AbstractJobsDatabaseTest.java @@ -8,12 +8,14 @@ import io.airbyte.db.instance.AbstractDatabaseTest; import io.airbyte.db.instance.test.TestDatabaseProviders; import java.io.IOException; +import javax.sql.DataSource; +import org.jooq.DSLContext; public abstract class AbstractJobsDatabaseTest extends AbstractDatabaseTest { @Override - public Database getDatabase() throws IOException { - return new TestDatabaseProviders(container).turnOffMigration().createNewJobsDatabase(); + public Database getDatabase(final DataSource dataSource, final DSLContext dslContext) throws IOException { + return new TestDatabaseProviders(dataSource, dslContext).turnOffMigration().createNewJobsDatabase(); } } diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/JobsDatabaseInstanceTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/JobsDatabaseInstanceTest.java index a3ec8d804cf80..aa1339876d8c2 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/JobsDatabaseInstanceTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/JobsDatabaseInstanceTest.java @@ -7,6 +7,7 @@ import static org.jooq.impl.DSL.select; import static org.junit.jupiter.api.Assertions.assertThrows; +import io.airbyte.db.Database; import org.jooq.exception.DataAccessException; import org.junit.jupiter.api.Test; @@ -14,15 +15,15 @@ class JobsDatabaseInstanceTest extends AbstractJobsDatabaseTest { @Test public void testGet() throws Exception { - // when the database has been initialized and loaded with data (in setup method), the get method - // should return the database - database = new JobsDatabaseInstance(container.getUsername(), container.getPassword(), container.getJdbcUrl()).getInitialized(); + final Database database = new JobsDatabaseInstance(getDslContext()).getInitialized(); // check table database.query(ctx -> ctx.fetchExists(select().from("airbyte_metadata"))); } @Test public void testGetAndInitialize() throws Exception { + final Database database = new JobsDatabaseInstance(getDslContext()).getInitialized(); + // check table database.query(ctx -> ctx.fetchExists(select().from("jobs"))); database.query(ctx -> ctx.fetchExists(select().from("attempts"))); @@ -31,9 +32,9 @@ public void testGetAndInitialize() throws Exception { // when the jobs database has been initialized, calling getAndInitialize again will not change // anything final String testSchema = 
"CREATE TABLE IF NOT EXISTS airbyte_test_metadata(id BIGINT PRIMARY KEY);"; - database = new JobsDatabaseInstance(container.getUsername(), container.getPassword(), container.getJdbcUrl(), testSchema).getAndInitialize(); + final Database database2 = new JobsDatabaseInstance(getDslContext(), testSchema).getAndInitialize(); // the airbyte_test_metadata table does not exist - assertThrows(DataAccessException.class, () -> database.query(ctx -> ctx.fetchExists(select().from("airbyte_test_metadata")))); + assertThrows(DataAccessException.class, () -> database2.query(ctx -> ctx.fetchExists(select().from("airbyte_test_metadata")))); } } diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/JobsDatabaseMigratorTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/JobsDatabaseMigratorTest.java index 219fc8b0e998c..8447a8c4c8ea0 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/JobsDatabaseMigratorTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/JobsDatabaseMigratorTest.java @@ -4,9 +4,11 @@ package io.airbyte.db.instance.jobs; +import io.airbyte.db.factory.FlywayFactory; import io.airbyte.db.instance.DatabaseMigrator; import io.airbyte.db.instance.development.MigrationDevHelper; import java.io.IOException; +import org.flywaydb.core.Flyway; import org.junit.jupiter.api.Test; public class JobsDatabaseMigratorTest extends AbstractJobsDatabaseTest { @@ -15,7 +17,9 @@ public class JobsDatabaseMigratorTest extends AbstractJobsDatabaseTest { @Test public void dumpSchema() throws IOException { - final DatabaseMigrator migrator = new JobsDatabaseMigrator(database, JobsDatabaseMigratorTest.class.getSimpleName()); + final Flyway flyway = FlywayFactory.create(getDataSource(), getClass().getSimpleName(), JobsDatabaseMigrator.DB_IDENTIFIER, + JobsDatabaseMigrator.MIGRATION_FILE_LOCATION); + final DatabaseMigrator migrator = new JobsDatabaseMigrator(database, flyway); migrator.migrate(); final String schema = migrator.dumpSchema(); MigrationDevHelper.dumpSchema(schema, SCHEMA_DUMP_FILE, false); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001_MigrateFailureReasonEnumValues_Test.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001_MigrateFailureReasonEnumValues_Test.java index b2d7e93f4dbaf..4a1d0818b97fe 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001_MigrateFailureReasonEnumValues_Test.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001_MigrateFailureReasonEnumValues_Test.java @@ -18,7 +18,6 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.Metadata; -import io.airbyte.db.Database; import io.airbyte.db.instance.jobs.AbstractJobsDatabaseTest; import io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.AttemptFailureSummaryForMigration; import io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.FailureReasonForMigration; @@ -100,8 +99,7 @@ public class V0_35_40_001_MigrateFailureReasonEnumValues_Test extends AbstractJo @Test public void test() throws Exception { - final Database database = getDatabase(); - final DSLContext ctx = DSL.using(database.getDataSource().getConnection()); + final DSLContext ctx = getDslContext(); V0_35_5_001__Add_failureSummary_col_to_Attempts.migrate(ctx); diff --git 
a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_5_001__Add_failureSummary_col_to_AttemptsTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_5_001__Add_failureSummary_col_to_AttemptsTest.java index 94b79480346f8..eb3163d0e73ec 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_5_001__Add_failureSummary_col_to_AttemptsTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_5_001__Add_failureSummary_col_to_AttemptsTest.java @@ -4,7 +4,6 @@ package io.airbyte.db.instance.jobs.migrations; -import io.airbyte.db.Database; import io.airbyte.db.instance.jobs.AbstractJobsDatabaseTest; import java.io.IOException; import java.sql.SQLException; @@ -17,8 +16,7 @@ public class V0_35_5_001__Add_failureSummary_col_to_AttemptsTest extends Abstrac @Test public void test() throws SQLException, IOException { - final Database database = getDatabase(); - final DSLContext context = DSL.using(database.getDataSource().getConnection()); + final DSLContext context = getDslContext(); Assertions.assertFalse(failureSummaryColumnExists(context)); V0_35_5_001__Add_failureSummary_col_to_Attempts.addFailureSummaryColumn(context); Assertions.assertTrue(failureSummaryColumnExists(context)); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseInstance.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseInstance.java index bd8813fdae7b9..78bd6a232edb8 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseInstance.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseInstance.java @@ -10,6 +10,7 @@ import java.io.IOException; import java.util.Collections; import java.util.function.Function; +import org.jooq.DSLContext; /** * A database instance for testing purposes only. @@ -27,8 +28,8 @@ public class ToysDatabaseInstance extends BaseDatabaseInstance { } }; - protected ToysDatabaseInstance(final String username, final String password, final String connectionString) throws IOException { - super(username, password, connectionString, MoreResources.readResource(SCHEMA_PATH), DATABASE_LOGGING_NAME, Collections.singleton(TABLE_NAME), + protected ToysDatabaseInstance(final DSLContext dslContext) throws IOException { + super(dslContext, DATABASE_LOGGING_NAME, MoreResources.readResource(SCHEMA_PATH), Collections.singleton(TABLE_NAME), IS_DATABASE_READY); } diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigrator.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigrator.java index a77eed3e809fc..f2f275f1bd5e1 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigrator.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigrator.java @@ -6,6 +6,7 @@ import io.airbyte.db.Database; import io.airbyte.db.instance.FlywayDatabaseMigrator; +import org.flywaydb.core.Flyway; /** * A database migrator for testing purposes only. 
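The constructor hunk below completes the migrator wiring pattern already seen in ConfigsDatabaseMigratorTest and JobsDatabaseMigratorTest: `Flyway` is built by `FlywayFactory` and injected, rather than assembled inside the migrator. A hedged sketch of the resulting call site, using names from these diffs (note `ToysDatabaseMigrator` lives in test sources; the `"example-runner"` label is a placeholder):

```java
import io.airbyte.db.Database;
import io.airbyte.db.factory.FlywayFactory;
import io.airbyte.db.instance.DatabaseMigrator;
import io.airbyte.db.instance.toys.ToysDatabaseMigrator;
import javax.sql.DataSource;
import org.flywaydb.core.Flyway;

// Illustrative wiring only, not part of the patch.
public class MigratorWiringExample {

  static void runMigrations(final Database database, final DataSource dataSource) {
    // Same four-argument FlywayFactory.create overload exercised in
    // FlywayFactoryTest#testCreatingAFlywayInstanceWithDefaults above.
    final Flyway flyway = FlywayFactory.create(dataSource, "example-runner",
        ToysDatabaseMigrator.DB_IDENTIFIER, ToysDatabaseMigrator.MIGRATION_FILE_LOCATION);
    final DatabaseMigrator migrator = new ToysDatabaseMigrator(database, flyway);
    migrator.migrate();
  }
}
```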
@@ -15,8 +16,8 @@ public class ToysDatabaseMigrator extends FlywayDatabaseMigrator { public static final String DB_IDENTIFIER = "toy"; public static final String MIGRATION_FILE_LOCATION = "classpath:io/airbyte/db/instance/toys/migrations"; - public ToysDatabaseMigrator(final Database database, final String migrationRunner) { - super(database, DB_IDENTIFIER, migrationRunner, MIGRATION_FILE_LOCATION); + public ToysDatabaseMigrator(final Database database, final Flyway flyway) { + super(database, flyway); } @Override diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigratorTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigratorTest.java index 4c432e8442623..67206e2f857c5 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigratorTest.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigratorTest.java @@ -8,9 +8,13 @@ import io.airbyte.commons.resources.MoreResources; import io.airbyte.db.Database; +import io.airbyte.db.factory.FlywayFactory; import io.airbyte.db.instance.AbstractDatabaseTest; import io.airbyte.db.instance.DatabaseMigrator; import java.io.IOException; +import javax.sql.DataSource; +import org.flywaydb.core.Flyway; +import org.jooq.DSLContext; import org.junit.jupiter.api.Test; class ToysDatabaseMigratorTest extends AbstractDatabaseTest { @@ -19,13 +23,16 @@ class ToysDatabaseMigratorTest extends AbstractDatabaseTest { private static final String POST_MIGRATION_SCHEMA_DUMP = "toys_database/schema_dump.txt"; @Override - public Database getDatabase() throws IOException { - return new ToysDatabaseInstance(container.getUsername(), container.getPassword(), container.getJdbcUrl()).getAndInitialize(); + public Database getDatabase(final DataSource dataSource, final DSLContext dslContext) throws IOException { + return new ToysDatabaseInstance(dslContext).getAndInitialize(); } @Test public void testMigration() throws Exception { - final DatabaseMigrator migrator = new ToysDatabaseMigrator(database, ToysDatabaseMigratorTest.class.getSimpleName()); + final DataSource dataSource = getDataSource(); + final Flyway flyway = FlywayFactory.create(dataSource, getClass().getSimpleName(), ToysDatabaseMigrator.DB_IDENTIFIER, + ToysDatabaseMigrator.MIGRATION_FILE_LOCATION); + final DatabaseMigrator migrator = new ToysDatabaseMigrator(database, flyway); // Compare pre migration baseline schema migrator.createBaseline(); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestDefaultJdbcDatabase.java b/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestDefaultJdbcDatabase.java index 0661c5b29af72..2dbc7f70741ba 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestDefaultJdbcDatabase.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestDefaultJdbcDatabase.java @@ -12,11 +12,13 @@ import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.test.utils.PostgreSQLContainerHelper; import java.sql.SQLException; import java.util.List; import java.util.stream.Stream; +import javax.sql.DataSource; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -95,14 +97,15 @@ void testQuery() throws SQLException { } private JdbcDatabase getDatabaseFromConfig(final JsonNode config) { - return 
Databases.createJdbcDatabase( + final DataSource dataSource = DataSourceFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver"); + config.get("port").asInt(), + config.get("database").asText())); + return new DefaultJdbcDatabase(dataSource); } private JsonNode getConfig(final PostgreSQLContainer psqlDb, final String dbName) { diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestJdbcUtils.java b/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestJdbcUtils.java index 001bfa1d3a969..0c86779cd0570 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestJdbcUtils.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestJdbcUtils.java @@ -17,6 +17,8 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.stream.MoreStreams; import io.airbyte.commons.string.Strings; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.protocol.models.JsonSchemaType; import io.airbyte.test.utils.PostgreSQLContainerHelper; import java.math.BigDecimal; @@ -30,7 +32,7 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import org.apache.commons.dbcp2.BasicDataSource; +import javax.sql.DataSource; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -46,7 +48,7 @@ public class TestJdbcUtils { private static PostgreSQLContainer PSQL_DB; - private BasicDataSource dataSource; + private DataSource dataSource; private static final JdbcSourceOperations sourceOperations = JdbcUtils.getDefaultSourceOperations(); @BeforeAll @@ -66,14 +68,14 @@ void setup() throws Exception { final String tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + dbName + ";"); PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB); - dataSource = new BasicDataSource(); - dataSource.setDriverClassName("org.postgresql.Driver"); - dataSource.setUsername(config.get("username").asText()); - dataSource.setPassword(config.get("password").asText()); - dataSource.setUrl(String.format("jdbc:postgresql://%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText())); + dataSource = DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("database").asText())); final JdbcDatabase defaultJdbcDatabase = new DefaultJdbcDatabase(dataSource); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestStreamingJdbcDatabase.java b/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestStreamingJdbcDatabase.java index cac812dd71cda..c7c08a33ec058 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestStreamingJdbcDatabase.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestStreamingJdbcDatabase.java @@ -14,6 +14,8 @@ import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; 
import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.db.jdbc.streaming.FetchSizeConstants; import io.airbyte.test.utils.PostgreSQLContainerHelper; @@ -24,7 +26,7 @@ import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.dbcp2.BasicDataSource; +import javax.sql.DataSource; import org.elasticsearch.common.collect.Map; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -65,14 +67,14 @@ void setup() { final String tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + dbName + ";"); PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB); - final BasicDataSource connectionPool = new BasicDataSource(); - connectionPool.setDriverClassName("org.postgresql.Driver"); - connectionPool.setUsername(config.get("username").asText()); - connectionPool.setPassword(config.get("password").asText()); - connectionPool.setUrl(String.format("jdbc:postgresql://%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText())); + final DataSource connectionPool = DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("database").asText())); defaultJdbcDatabase = spy(new DefaultJdbcDatabase(connectionPool)); streamingJdbcDatabase = new StreamingJdbcDatabase(connectionPool, JdbcUtils.getDefaultSourceOperations(), AdaptiveStreamingQueryConfig::new); diff --git a/airbyte-integrations/bases/base-java/build.gradle b/airbyte-integrations/bases/base-java/build.gradle index c77f75cb38a36..42139f79dbe07 100644 --- a/airbyte-integrations/bases/base-java/build.gradle +++ b/airbyte-integrations/bases/base-java/build.gradle @@ -18,8 +18,10 @@ dependencies { implementation 'org.bouncycastle:bcpkix-jdk15on:1.66' implementation 'org.bouncycastle:bctls-jdk15on:1.66' - implementation "org.testcontainers:testcontainers:1.15.3" - implementation "org.testcontainers:jdbc:1.15.3" + implementation libs.testcontainers + implementation libs.testcontainers.jdbc implementation files(project(':airbyte-integrations:bases:base').airbyteDocker.outputs) + + testImplementation 'commons-lang:commons-lang:2.6' } diff --git a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumerTest.java b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumerTest.java index 1816b3abf1c45..25e467bb5e81e 100644 --- a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumerTest.java +++ b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumerTest.java @@ -35,9 +35,9 @@ import java.util.List; import java.util.function.Consumer; import java.util.stream.Collectors; +import org.apache.commons.lang.RandomStringUtils; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.testcontainers.shaded.org.apache.commons.lang.RandomStringUtils; public class BufferedStreamConsumerTest { diff --git 
a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceDatabaseTypeTest.java b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceDatabaseTypeTest.java index 6ba055ea19cc5..8e637e654e463 100644 --- a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceDatabaseTypeTest.java +++ b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceDatabaseTypeTest.java @@ -172,8 +172,6 @@ private void setupDatabaseInternal() throws Exception { return null; }); } - - database.close(); } /** diff --git a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/performancetest/AbstractSourceFillDbWithTestData.java b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/performancetest/AbstractSourceFillDbWithTestData.java index 9680761dd699c..096bace51d149 100644 --- a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/performancetest/AbstractSourceFillDbWithTestData.java +++ b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/performancetest/AbstractSourceFillDbWithTestData.java @@ -68,9 +68,6 @@ public void addTestData(final String dbName, } return null; }); - - database.close(); - } /** diff --git a/airbyte-integrations/connectors/destination-cassandra/build.gradle b/airbyte-integrations/connectors/destination-cassandra/build.gradle index c9560e865868e..fb307b0ced87f 100644 --- a/airbyte-integrations/connectors/destination-cassandra/build.gradle +++ b/airbyte-integrations/connectors/destination-cassandra/build.gradle @@ -10,7 +10,6 @@ application { } def cassandraDriver = '4.13.0' -def testContainersVersion = '1.16.0' def assertVersion = '3.21.0' dependencies { @@ -26,7 +25,7 @@ dependencies { // https://mvnrepository.com/artifact/org.assertj/assertj-core testImplementation "org.assertj:assertj-core:${assertVersion}" - testImplementation "org.testcontainers:cassandra:${testContainersVersion}" + testImplementation libs.testcontainers.cassandra integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') diff --git a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/build.gradle index 5899daf2bc9d5..72efaa86a9f1c 100644 --- a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/build.gradle @@ -21,10 +21,10 @@ dependencies { implementation 'ru.yandex.clickhouse:clickhouse-jdbc:0.3.1-patch' // https://mvnrepository.com/artifact/org.testcontainers/clickhouse - testImplementation 'org.testcontainers:clickhouse:1.16.2' + testImplementation libs.testcontainers.clickhouse integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-clickhouse') // https://mvnrepository.com/artifact/org.testcontainers/clickhouse - integrationTestJavaImplementation "org.testcontainers:clickhouse:1.16.2" + integrationTestJavaImplementation libs.testcontainers.clickhouse } diff --git 
a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationStrictEncryptAcceptanceTest.java b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationStrictEncryptAcceptanceTest.java index ee22f15849f7e..f9822772d1d8d 100644 --- a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationStrictEncryptAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationStrictEncryptAcceptanceTest.java @@ -8,7 +8,8 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.destination.ExtendedNameTransformer; @@ -132,11 +133,13 @@ private static JdbcDatabase getDatabase(final JsonNode config) { config.get("host").asText(), config.get("port").asText(), config.get("database").asText()); - return Databases.createJdbcDatabase( + return new DefaultJdbcDatabase(DataSourceFactory.create( config.get("username").asText(), config.has("password") ? config.get("password").asText() : null, - jdbcStr, - ClickhouseDestination.DRIVER_CLASS); + ClickhouseDestination.DRIVER_CLASS, + jdbcStr + ) + ); } @Override diff --git a/airbyte-integrations/connectors/destination-clickhouse/build.gradle b/airbyte-integrations/connectors/destination-clickhouse/build.gradle index 496a0e7f09e41..acf3a11a0d0b9 100644 --- a/airbyte-integrations/connectors/destination-clickhouse/build.gradle +++ b/airbyte-integrations/connectors/destination-clickhouse/build.gradle @@ -21,11 +21,11 @@ dependencies { implementation 'ru.yandex.clickhouse:clickhouse-jdbc:0.3.1-patch' // https://mvnrepository.com/artifact/org.testcontainers/clickhouse - testImplementation 'org.testcontainers:clickhouse:1.16.2' + testImplementation libs.testcontainers.clickhouse integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-clickhouse') integrationTestJavaImplementation project(':airbyte-workers') // https://mvnrepository.com/artifact/org.testcontainers/clickhouse - integrationTestJavaImplementation "org.testcontainers:clickhouse:1.16.2" + integrationTestJavaImplementation libs.testcontainers.clickhouse } diff --git a/airbyte-integrations/connectors/destination-clickhouse/src/main/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestination.java b/airbyte-integrations/connectors/destination-clickhouse/src/main/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestination.java index b77459a8ee62b..a41b3f4f2a452 100644 --- a/airbyte-integrations/connectors/destination-clickhouse/src/main/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestination.java +++ b/airbyte-integrations/connectors/destination-clickhouse/src/main/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestination.java @@ -7,6 +7,7 @@ import 
com.fasterxml.jackson.databind.JsonNode;
 import com.google.common.collect.ImmutableMap;
 import io.airbyte.commons.json.Jsons;
+import io.airbyte.db.factory.DatabaseDriver;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.integrations.base.Destination;
 import io.airbyte.integrations.base.IntegrationRunner;
@@ -25,7 +26,7 @@ public class ClickhouseDestination extends AbstractJdbcDestination implements De
 
   private static final Logger LOGGER = LoggerFactory.getLogger(ClickhouseDestination.class);
 
-  public static final String DRIVER_CLASS = "ru.yandex.clickhouse.ClickHouseDriver";
+  public static final String DRIVER_CLASS = DatabaseDriver.CLICKHOUSE.getDriverClassName();
 
   public static final List<String> HOST_KEY = List.of("host");
   public static final List<String> PORT_KEY = List.of("port");
diff --git a/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationAcceptanceTest.java
index 6dee8276d19ec..1166411612c40 100644
--- a/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationAcceptanceTest.java
@@ -8,7 +8,9 @@
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.collect.ImmutableMap;
 import io.airbyte.commons.json.Jsons;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DataSourceFactory;
+import io.airbyte.db.factory.DatabaseDriver;
+import io.airbyte.db.jdbc.DefaultJdbcDatabase;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.destination.ExtendedNameTransformer;
@@ -126,14 +128,17 @@ private List<JsonNode> retrieveRecordsFromTable(final String tableName, final St
   }
 
   private static JdbcDatabase getDatabase(final JsonNode config) {
-    return Databases.createJdbcDatabase(
-        config.get("username").asText(),
-        config.has("password") ? config.get("password").asText() : null,
-        String.format("jdbc:clickhouse://%s:%s/%s",
-            config.get("host").asText(),
-            config.get("port").asText(),
-            config.get("database").asText()),
-        ClickhouseDestination.DRIVER_CLASS);
+    return new DefaultJdbcDatabase(
+        DataSourceFactory.create(
+            config.get("username").asText(),
+            config.has("password") ? config.get("password").asText() : null,
+            ClickhouseDestination.DRIVER_CLASS,
+            String.format(DatabaseDriver.CLICKHOUSE.getUrlFormatString(),
+                config.get("host").asText(),
+                config.get("port").asInt(),
+                config.get("database").asText())
+        )
+    );
   }
 
   @Override
diff --git a/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/SshClickhouseDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/SshClickhouseDestinationAcceptanceTest.java
index d5b0707116eca..eeb4bd757ac2c 100644
--- a/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/SshClickhouseDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/SshClickhouseDestinationAcceptanceTest.java
@@ -7,7 +7,9 @@
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import io.airbyte.commons.json.Jsons;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DataSourceFactory;
+import io.airbyte.db.factory.DatabaseDriver;
+import io.airbyte.db.jdbc.DefaultJdbcDatabase;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.base.ssh.SshBastionContainer;
@@ -144,14 +146,17 @@ protected List<String> resolveIdentifier(final String identifier) {
   }
 
   private static JdbcDatabase getDatabase(final JsonNode config) {
-    return Databases.createJdbcDatabase(
-        config.get("username").asText(),
-        config.has("password") ? config.get("password").asText() : null,
-        String.format("jdbc:clickhouse://%s:%s/%s",
-            config.get("host").asText(),
-            config.get("port").asText(),
-            config.get("database").asText()),
-        ClickhouseDestination.DRIVER_CLASS);
+    return new DefaultJdbcDatabase(
+        DataSourceFactory.create(
+            config.get("username").asText(),
+            config.has("password") ? config.get("password").asText() : null,
+            ClickhouseDestination.DRIVER_CLASS,
+            String.format(DatabaseDriver.CLICKHOUSE.getUrlFormatString(),
+                config.get("host").asText(),
+                config.get("port").asInt(),
+                config.get("database").asText())
+        )
+    );
   }
 
   @Override
diff --git a/airbyte-integrations/connectors/destination-clickhouse/src/test/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationTest.java b/airbyte-integrations/connectors/destination-clickhouse/src/test/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationTest.java
index 75e5d96637422..765acaef02c18 100644
--- a/airbyte-integrations/connectors/destination-clickhouse/src/test/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationTest.java
+++ b/airbyte-integrations/connectors/destination-clickhouse/src/test/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationTest.java
@@ -10,7 +10,8 @@
 import com.google.common.collect.ImmutableMap;
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.commons.map.MoreMaps;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DataSourceFactory;
+import io.airbyte.db.jdbc.DefaultJdbcDatabase;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.integrations.base.AirbyteMessageConsumer;
@@ -125,14 +126,17 @@ void sanityTest() throws Exception {
         .withData(Jsons.jsonNode(ImmutableMap.of(DB_NAME + "." + STREAM_NAME, 10)))));
     consumer.close();
 
-    final JdbcDatabase database = Databases.createJdbcDatabase(
-        config.get("username").asText(),
-        config.get("password").asText(),
-        String.format("jdbc:clickhouse://%s:%s/%s",
-            config.get("host").asText(),
-            config.get("port").asText(),
-            config.get("database").asText()),
-        ClickhouseDestination.DRIVER_CLASS);
+    final JdbcDatabase database = new DefaultJdbcDatabase(
+        DataSourceFactory.create(
+            config.get("username").asText(),
+            config.get("password").asText(),
+            ClickhouseDestination.DRIVER_CLASS,
+            String.format("jdbc:clickhouse://%s:%s/%s",
+                config.get("host").asText(),
+                config.get("port").asText(),
+                config.get("database").asText())
+        )
+    );
 
     final List<JsonNode> actualRecords = database.bufferedResultSetQuery(
         connection -> connection.createStatement().executeQuery(
diff --git a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksDestination.java b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksDestination.java
index 8441df92bbbd1..b5f0df7de8796 100644
--- a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksDestination.java
+++ b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksDestination.java
@@ -5,7 +5,8 @@
 package io.airbyte.integrations.destination.databricks;
 
 import com.fasterxml.jackson.databind.JsonNode;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DataSourceFactory;
+import io.airbyte.db.jdbc.DefaultJdbcDatabase;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.integrations.base.AirbyteMessageConsumer;
 import io.airbyte.integrations.base.IntegrationRunner;
@@ -76,11 +77,13 @@ static String getDatabricksConnectionString(final DatabricksDestinationConfig da
   }
 
   static JdbcDatabase getDatabase(final DatabricksDestinationConfig databricksConfig) {
-    return Databases.createJdbcDatabase(
+    return new DefaultJdbcDatabase(DataSourceFactory.create(
         DatabricksConstants.DATABRICKS_USERNAME,
         databricksConfig.getDatabricksPersonalAccessToken(),
-        getDatabricksConnectionString(databricksConfig),
-        DatabricksConstants.DATABRICKS_DRIVER_CLASS);
+        DatabricksConstants.DATABRICKS_DRIVER_CLASS,
+        getDatabricksConnectionString(databricksConfig)
+    )
+    );
   }
 
 }
diff --git a/airbyte-integrations/connectors/destination-databricks/src/test-integration/java/io/airbyte/integrations/destination/databricks/DatabricksDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-databricks/src/test-integration/java/io/airbyte/integrations/destination/databricks/DatabricksDestinationAcceptanceTest.java
index 55f03862fa300..80a19ab36126d 100644
--- a/airbyte-integrations/connectors/destination-databricks/src/test-integration/java/io/airbyte/integrations/destination/databricks/DatabricksDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-databricks/src/test-integration/java/io/airbyte/integrations/destination/databricks/DatabricksDestinationAcceptanceTest.java
@@ -17,7 +17,7 @@
 import io.airbyte.commons.io.IOs;
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.db.Database;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DSLContextFactory;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.destination.ExtendedNameTransformer;
@@ -32,6 +32,7 @@
 import java.util.List;
 import java.util.stream.Collectors;
 import org.apache.commons.lang3.RandomStringUtils;
+import org.jooq.DSLContext;
 import org.jooq.SQLDialect;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -141,12 +142,8 @@ protected void tearDown(final TestDestinationEnv testEnv) throws SQLException {
   }
 
   private static Database getDatabase(final DatabricksDestinationConfig databricksConfig) {
-    return Databases.createDatabase(
-        DatabricksConstants.DATABRICKS_USERNAME,
-        databricksConfig.getDatabricksPersonalAccessToken(),
-        DatabricksDestination.getDatabricksConnectionString(databricksConfig),
-        DatabricksConstants.DATABRICKS_DRIVER_CLASS,
-        SQLDialect.DEFAULT);
+    final DSLContext dslContext = DSLContextFactory.create(DatabricksConstants.DATABRICKS_USERNAME, databricksConfig.getDatabricksPersonalAccessToken(), DatabricksConstants.DATABRICKS_DRIVER_CLASS, DatabricksDestination.getDatabricksConnectionString(databricksConfig), SQLDialect.DEFAULT);
+    return new Database(dslContext);
   }
 
 }
diff --git a/airbyte-integrations/connectors/destination-elasticsearch/build.gradle b/airbyte-integrations/connectors/destination-elasticsearch/build.gradle
index c4de7d303188a..d498126e8a3db 100644
--- a/airbyte-integrations/connectors/destination-elasticsearch/build.gradle
+++ b/airbyte-integrations/connectors/destination-elasticsearch/build.gradle
@@ -29,9 +29,9 @@ dependencies {
 
   // MIT
   // https://www.testcontainers.org/
-  //implementation "org.testcontainers:testcontainers:1.16.0"
-  testImplementation "org.testcontainers:elasticsearch:1.15.3"
-  integrationTestJavaImplementation "org.testcontainers:elasticsearch:1.15.3"
+  //implementation libs.testcontainers.elasticsearch
+  testImplementation libs.testcontainers.elasticsearch
+  integrationTestJavaImplementation libs.testcontainers.elasticsearch
 
   integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test')
   integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-elasticsearch')
diff --git a/airbyte-integrations/connectors/destination-gcs/build.gradle b/airbyte-integrations/connectors/destination-gcs/build.gradle
index 8b03c8e8d13ed..ebbad184ccbec 100644
--- a/airbyte-integrations/connectors/destination-gcs/build.gradle
+++ b/airbyte-integrations/connectors/destination-gcs/build.gradle
@@ -33,6 +33,7 @@ dependencies {
   implementation group: 'com.github.airbytehq', name: 'json-avro-converter', version: '1.0.1'
 
   testImplementation 'org.apache.commons:commons-lang3:3.11'
+  testImplementation 'org.xerial.snappy:snappy-java:1.1.8.4'
 
   integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test')
   integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-gcs')
diff --git a/airbyte-integrations/connectors/destination-jdbc/build.gradle b/airbyte-integrations/connectors/destination-jdbc/build.gradle
index ea25dc88d16eb..a34f53785bc14 100644
--- a/airbyte-integrations/connectors/destination-jdbc/build.gradle
+++ b/airbyte-integrations/connectors/destination-jdbc/build.gradle
@@ -23,11 +23,11 @@ dependencies {
   // https://github.com/aesy/datasize
   implementation "io.aesy:datasize:1.0.0"
 
-  testImplementation "org.testcontainers:postgresql:1.15.3"
+  testImplementation libs.testcontainers.postgresql
   testImplementation "org.mockito:mockito-inline:4.1.0"
 
   integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test')
-  integrationTestJavaImplementation "org.testcontainers:postgresql:1.15.3"
+  integrationTestJavaImplementation libs.testcontainers.postgresql
 
   implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs)
   integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-normalization').airbyteDocker.outputs)
diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestination.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestination.java
index d8afb121b1526..ff33348ed0e42 100644
--- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestination.java
+++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestination.java
@@ -6,7 +6,8 @@
 
 import com.fasterxml.jackson.databind.JsonNode;
 import io.airbyte.commons.map.MoreMaps;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DataSourceFactory;
+import io.airbyte.db.jdbc.DefaultJdbcDatabase;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.integrations.BaseConnector;
@@ -86,12 +87,15 @@ public static void attemptSQLCreateAndDropTableOperations(final String outputSch
 
   protected JdbcDatabase getDatabase(final JsonNode config) {
     final JsonNode jdbcConfig = toJdbcConfig(config);
-    return Databases.createJdbcDatabase(
-        jdbcConfig.get("username").asText(),
-        jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null,
-        jdbcConfig.get("jdbc_url").asText(),
-        driverClass,
-        getConnectionProperties(config));
+    return new DefaultJdbcDatabase(
+        DataSourceFactory.create(
+            jdbcConfig.get("username").asText(),
+            jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null,
+            driverClass,
+            jdbcConfig.get("jdbc_url").asText(),
+            getConnectionProperties(config)
+        )
+    );
   }
 
   protected Map<String, String> getConnectionProperties(final JsonNode config) {
diff --git a/airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/SqlOperationsUtilsTest.java b/airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/SqlOperationsUtilsTest.java
index 671bbda75dfde..a497ed32b57aa 100644
--- a/airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/SqlOperationsUtilsTest.java
+++ b/airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/SqlOperationsUtilsTest.java
@@ -14,7 +14,9 @@
 import com.google.common.collect.Lists;
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.db.DataTypeUtils;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DataSourceFactory;
+import io.airbyte.db.factory.DatabaseDriver;
+import io.airbyte.db.jdbc.DefaultJdbcDatabase;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.integrations.base.JavaBaseConstants;
@@ -46,11 +48,14 @@ void setup() {
 
     final JsonNode config = createConfig();
 
-    database = Databases.createJdbcDatabase(
-        config.get("username").asText(),
-        config.get("password").asText(),
-        config.get("jdbc_url").asText(),
-        "org.postgresql.Driver");
+    database = new DefaultJdbcDatabase(
+        DataSourceFactory.create(
+            config.get("username").asText(),
+            config.get("password").asText(),
+            DatabaseDriver.POSTGRESQL.getDriverClassName(),
+            config.get("jdbc_url").asText()
+        )
+    );
 
     uuidSupplier = mock(Supplier.class);
   }
diff --git a/airbyte-integrations/connectors/destination-kafka/build.gradle b/airbyte-integrations/connectors/destination-kafka/build.gradle
index 159c3bb5de4b8..24657316da483 100644
--- a/airbyte-integrations/connectors/destination-kafka/build.gradle
+++ b/airbyte-integrations/connectors/destination-kafka/build.gradle
@@ -19,7 +19,7 @@ dependencies {
 
   integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test')
   integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-kafka')
-  integrationTestJavaImplementation "org.testcontainers:kafka:1.15.3"
+  integrationTestJavaImplementation libs.testcontainers.kafka
 
   implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs)
 }
diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/build.gradle b/airbyte-integrations/connectors/destination-mariadb-columnstore/build.gradle
index 39274993ff4e2..174572671b717 100644
--- a/airbyte-integrations/connectors/destination-mariadb-columnstore/build.gradle
+++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/build.gradle
@@ -22,5 +22,5 @@ dependencies {
 
   integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test')
   integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-mariadb-columnstore')
-  integrationTestJavaImplementation "org.testcontainers:mariadb:1.16.2"
+  integrationTestJavaImplementation libs.testcontainers.mariadb
 }
diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestination.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestination.java
index f8d6ed49025fc..cd4e6219e9e7a 100644
--- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestination.java
+++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestination.java
@@ -7,6 +7,7 @@
 import com.fasterxml.jackson.databind.JsonNode;
 import com.google.common.collect.ImmutableMap;
 import io.airbyte.commons.json.Jsons;
+import io.airbyte.db.factory.DatabaseDriver;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.integrations.base.Destination;
 import io.airbyte.integrations.base.IntegrationRunner;
@@ -23,7 +24,7 @@ public class MariadbColumnstoreDestination extends AbstractJdbcDestination implements Destination {
 
   private static final Logger LOGGER = LoggerFactory.getLogger(MariadbColumnstoreDestination.class);
 
-  public static final String DRIVER_CLASS = "org.mariadb.jdbc.Driver";
+  public static final String DRIVER_CLASS = DatabaseDriver.MARIADB.getDriverClassName();
 
   public static final List<String> HOST_KEY = List.of("host");
   public static final List<String> PORT_KEY = List.of("port");
@@ -75,9 +76,9 @@ protected Map<String, String> getDefaultConnectionProperties(final JsonNode conf
 
   @Override
   public JsonNode toJdbcConfig(final JsonNode config) {
-    final String jdbcUrl = String.format("jdbc:mariadb://%s:%s/%s",
+    final String jdbcUrl = String.format(DatabaseDriver.MARIADB.getUrlFormatString(),
         config.get("host").asText(),
-        config.get("port").asText(),
+        config.get("port").asInt(),
         config.get("database").asText());
 
     final ImmutableMap.Builder<Object, Object> configBuilder = ImmutableMap.builder()
diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java
index c2b193d22805d..d755925e4cc95 100644
--- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java
@@ -8,7 +8,9 @@
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.collect.ImmutableMap;
 import io.airbyte.commons.json.Jsons;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DataSourceFactory;
+import io.airbyte.db.factory.DatabaseDriver;
+import io.airbyte.db.jdbc.DefaultJdbcDatabase;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.destination.ExtendedNameTransformer;
@@ -101,14 +103,17 @@ private List<JsonNode> retrieveRecordsFromTable(final String tableName, final St
   }
 
   private static JdbcDatabase getDatabase(final JsonNode config) {
-    return Databases.createJdbcDatabase(
-        config.get("username").asText(),
-        config.has("password") ? config.get("password").asText() : null,
-        String.format("jdbc:mariadb://%s:%s/%s",
-            config.get("host").asText(),
-            config.get("port").asText(),
-            config.get("database").asText()),
-        MariadbColumnstoreDestination.DRIVER_CLASS);
+    return new DefaultJdbcDatabase(
+        DataSourceFactory.create(
+            config.get("username").asText(),
+            config.has("password") ? config.get("password").asText() : null,
+            MariadbColumnstoreDestination.DRIVER_CLASS,
+            String.format(DatabaseDriver.MARIADB.getUrlFormatString(),
+                config.get("host").asText(),
+                config.get("port").asInt(),
+                config.get("database").asText())
+        )
+    );
   }
 
   @Override
diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshMariadbColumnstoreDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshMariadbColumnstoreDestinationAcceptanceTest.java
index 907397a1efa92..3b450643c706e 100644
--- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshMariadbColumnstoreDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshMariadbColumnstoreDestinationAcceptanceTest.java
@@ -9,7 +9,8 @@
 import io.airbyte.commons.functional.CheckedFunction;
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.db.Database;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DSLContextFactory;
+import io.airbyte.db.factory.DatabaseDriver;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.base.ssh.SshBastionContainer;
@@ -19,6 +20,8 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.stream.Collectors;
+import org.jooq.DSLContext;
+import org.jooq.SQLDialect;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.testcontainers.containers.MariaDBContainer;
@@ -73,10 +76,10 @@ protected String getDefaultSchema(final JsonNode config) {
   }
 
   @Override
-  protected List retrieveRecords(TestDestinationEnv testEnv,
-                                 String streamName,
-                                 String namespace,
-                                 JsonNode streamSchema)
+  protected List<JsonNode> retrieveRecords(final TestDestinationEnv testEnv,
+                                           final String streamName,
+                                           final String namespace,
+                                           final JsonNode streamSchema)
       throws Exception {
     return retrieveRecordsFromTable(namingResolver.getRawTableName(streamName), namespace)
         .stream()
@@ -101,13 +104,15 @@ private List<JsonNode> retrieveRecordsFromTable(final String tableName, final St
   }
 
   private static Database getDatabaseFromConfig(final JsonNode config) {
-    return Databases.createMariaDbDatabase(
+    final DSLContext dslContext = DSLContextFactory.create(
         config.get("username").asText(),
         config.get("password").asText(),
-        String.format("jdbc:mariadb://%s:%s/%s",
+        DatabaseDriver.MARIADB.getDriverClassName(),
+        String.format(DatabaseDriver.MARIADB.getUrlFormatString(),
            config.get("host").asText(),
-            config.get("port").asText(),
-            config.get("database").asText()));
+            config.get("port").asInt(),
+            config.get("database").asText()), SQLDialect.MARIADB);
+    return new Database(dslContext);
   }
 
   @Override
@@ -121,25 +126,25 @@ protected List<String> resolveIdentifier(final String identifier) {
   }
 
   @Override
-  protected void setup(TestDestinationEnv testEnv) throws Exception {
+  protected void setup(final TestDestinationEnv testEnv) throws Exception {
     bastion.initAndStartBastion();
     startAndInitJdbcContainer();
   }
 
   private void startAndInitJdbcContainer() throws Exception {
-    DockerImageName mcsImage = DockerImageName.parse("fengdi/columnstore:1.5.2").asCompatibleSubstituteFor("mariadb");
+    final DockerImageName mcsImage = DockerImageName.parse("fengdi/columnstore:1.5.2").asCompatibleSubstituteFor("mariadb");
     db = new MariaDBContainer<>(mcsImage)
         .withNetwork(bastion.getNetWork());
     db.start();
 
-    String createUser = String.format("CREATE USER '%s'@'%%' IDENTIFIED BY '%s';", db.getUsername(), db.getPassword());
-    String grantAll = String.format("GRANT ALL PRIVILEGES ON *.* TO '%s'@'%%' IDENTIFIED BY '%s';", db.getUsername(), db.getPassword());
-    String createDb = String.format("CREATE DATABASE %s DEFAULT CHARSET = utf8;", db.getDatabaseName());
+    final String createUser = String.format("CREATE USER '%s'@'%%' IDENTIFIED BY '%s';", db.getUsername(), db.getPassword());
+    final String grantAll = String.format("GRANT ALL PRIVILEGES ON *.* TO '%s'@'%%' IDENTIFIED BY '%s';", db.getUsername(), db.getPassword());
+    final String createDb = String.format("CREATE DATABASE %s DEFAULT CHARSET = utf8;", db.getDatabaseName());
     db.execInContainer("mariadb", "-e", createUser + grantAll + createDb);
   }
 
   @Override
-  protected void tearDown(TestDestinationEnv testEnv) {
+  protected void tearDown(final TestDestinationEnv testEnv) {
     bastion.stopAndCloseContainers(db);
   }
 
diff --git a/airbyte-integrations/connectors/destination-meilisearch/build.gradle b/airbyte-integrations/connectors/destination-meilisearch/build.gradle
index 6ea15c9d08914..c1a10c4f776f1 100644
--- a/airbyte-integrations/connectors/destination-meilisearch/build.gradle
+++ b/airbyte-integrations/connectors/destination-meilisearch/build.gradle
@@ -20,7 +20,7 @@ dependencies {
 
   integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test')
   integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-meilisearch')
-  integrationTestJavaImplementation "org.testcontainers:testcontainers:1.15.3"
+  integrationTestJavaImplementation libs.testcontainers
 
   implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs)
 }
diff --git a/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/build.gradle
index 21ebce1af5f99..81ee2032de49a 100644
--- a/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/build.gradle
+++ b/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/build.gradle
@@ -18,7 +18,7 @@ dependencies {
   implementation project(':airbyte-integrations:connectors:destination-mongodb')
   implementation 'org.mongodb:mongodb-driver-sync:4.3.0'
 
-  testImplementation 'org.testcontainers:mongodb:1.15.3'
+  testImplementation libs.testcontainers.mongodb
 
   integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-mongodb-strict-encrypt')
   integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test')
diff --git a/airbyte-integrations/connectors/destination-mongodb/build.gradle b/airbyte-integrations/connectors/destination-mongodb/build.gradle
index 0a5f8f840e622..253bd086c9c56 100644
--- a/airbyte-integrations/connectors/destination-mongodb/build.gradle
+++ b/airbyte-integrations/connectors/destination-mongodb/build.gradle
@@ -18,7 +18,7 @@ dependencies {
 
   implementation 'org.mongodb:mongodb-driver-sync:4.3.0'
 
-  testImplementation 'org.testcontainers:mongodb:1.15.3'
+  testImplementation libs.testcontainers.mongodb
 
   integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-mongodb')
   integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test')
diff --git a/airbyte-integrations/connectors/destination-mongodb/src/main/java/io/airbyte/integrations/destination/mongodb/MongodbRecordConsumer.java b/airbyte-integrations/connectors/destination-mongodb/src/main/java/io/airbyte/integrations/destination/mongodb/MongodbRecordConsumer.java
index 8cce8c2cc8a2a..4c0d64b963c65 100644
--- a/airbyte-integrations/connectors/destination-mongodb/src/main/java/io/airbyte/integrations/destination/mongodb/MongodbRecordConsumer.java
+++ b/airbyte-integrations/connectors/destination-mongodb/src/main/java/io/airbyte/integrations/destination/mongodb/MongodbRecordConsumer.java
@@ -18,6 +18,7 @@
 import io.airbyte.protocol.models.AirbyteMessage;
 import io.airbyte.protocol.models.AirbyteRecordMessage;
 import io.airbyte.protocol.models.ConfiguredAirbyteCatalog;
+import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -116,7 +117,8 @@ private void insertRecordToTmpCollection(final MongodbWriteConfig writeConfig,
     try {
       final AirbyteRecordMessage recordMessage = message.getRecord();
       final Map<String, Object> result = objectMapper.convertValue(recordMessage.getData(), new TypeReference<>() {});
-      final var newDocumentDataHashCode = UUID.nameUUIDFromBytes(DigestUtils.md5Hex(Jsons.toBytes(recordMessage.getData())).getBytes()).toString();
+      final var newDocumentDataHashCode = UUID.nameUUIDFromBytes(DigestUtils.md5Hex(Jsons.toBytes(recordMessage.getData())).getBytes(
+          Charset.defaultCharset())).toString();
       final var newDocument = new Document();
       newDocument.put(AIRBYTE_DATA, new Document(result));
       newDocument.put(AIRBYTE_DATA_HASH, newDocumentDataHashCode);
diff --git a/airbyte-integrations/connectors/destination-mssql-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-mssql-strict-encrypt/build.gradle
index c641cf2b3a712..69932bb3d6dfa 100644
--- a/airbyte-integrations/connectors/destination-mssql-strict-encrypt/build.gradle
+++ b/airbyte-integrations/connectors/destination-mssql-strict-encrypt/build.gradle
@@ -16,11 +16,12 @@ dependencies {
   implementation project(':airbyte-protocol:models')
   implementation project(':airbyte-integrations:connectors:destination-jdbc')
   implementation project(':airbyte-integrations:connectors:destination-mssql')
+  implementation project(':airbyte-test-utils')
 
   implementation 'com.microsoft.sqlserver:mssql-jdbc:8.4.1.jre14'
 
   testImplementation 'org.apache.commons:commons-lang3:3.11'
-  testImplementation "org.testcontainers:mssqlserver:1.15.3"
+  testImplementation libs.testcontainers.mssqlserver
 
   integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test')
   integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-mssql-strict-encrypt')
diff --git a/airbyte-integrations/connectors/destination-mssql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mssql_strict_encrypt/MssqlStrictEncryptDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mssql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mssql_strict_encrypt/MssqlStrictEncryptDestinationAcceptanceTest.java
index a84c56bcd3cfc..76ca12d6cc0b8 100644
--- a/airbyte-integrations/connectors/destination-mssql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mssql_strict_encrypt/MssqlStrictEncryptDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-mssql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mssql_strict_encrypt/MssqlStrictEncryptDestinationAcceptanceTest.java
@@ -13,17 +13,21 @@
 import io.airbyte.commons.resources.MoreResources;
 import io.airbyte.commons.string.Strings;
 import io.airbyte.db.Database;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DSLContextFactory;
+import io.airbyte.db.factory.DatabaseDriver;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.base.ssh.SshHelpers;
 import io.airbyte.integrations.destination.ExtendedNameTransformer;
 import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest;
 import io.airbyte.protocol.models.ConnectorSpecification;
+import io.airbyte.test.utils.DatabaseConnectionHelper;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.stream.Collectors;
+import org.jooq.DSLContext;
+import org.jooq.SQLDialect;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
@@ -121,8 +125,8 @@ protected List<String> resolveIdentifier(final String identifier) {
   }
 
   private List<JsonNode> retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException {
-    return Databases.createSqlServerDatabase(db.getUsername(), db.getPassword(),
-        db.getJdbcUrl()).query(
+    final DSLContext dslContext = DatabaseConnectionHelper.createDslContext(db, null);
+    return new Database(dslContext).query(
         ctx -> {
           ctx.fetch(String.format("USE %s;", config.get("database")));
           return ctx
@@ -135,12 +139,14 @@ private List<JsonNode> retrieveRecordsFromTable(final String tableName, final St
   }
 
   private static Database getDatabase(final JsonNode config) {
-    return Databases.createSqlServerDatabase(
+    final DSLContext dslContext = DSLContextFactory.create(
         config.get("username").asText(),
         config.get("password").asText(),
-        String.format("jdbc:sqlserver://%s:%s",
+        DatabaseDriver.MSSQLSERVER.getDriverClassName(),
+        String.format(DatabaseDriver.MSSQLSERVER.getUrlFormatString(),
            config.get("host").asText(),
-            config.get("port").asInt()));
+            config.get("port").asInt()), SQLDialect.DEFAULT);
+    return new Database(dslContext);
   }
 
   @Override
diff --git a/airbyte-integrations/connectors/destination-mssql/build.gradle b/airbyte-integrations/connectors/destination-mssql/build.gradle
index 394142734d144..0950b705f4a08 100644
--- a/airbyte-integrations/connectors/destination-mssql/build.gradle
+++ b/airbyte-integrations/connectors/destination-mssql/build.gradle
@@ -14,11 +14,12 @@ dependencies {
   implementation project(':airbyte-integrations:bases:base-java')
   implementation project(':airbyte-protocol:models')
   implementation project(':airbyte-integrations:connectors:destination-jdbc')
+  implementation project(':airbyte-test-utils')
 
   implementation 'com.microsoft.sqlserver:mssql-jdbc:8.4.1.jre14'
 
   testImplementation 'org.apache.commons:commons-lang3:3.11'
-  testImplementation "org.testcontainers:mssqlserver:1.15.3"
+  testImplementation libs.testcontainers.mssqlserver
 
   integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test')
   integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-mssql')
diff --git a/airbyte-integrations/connectors/destination-mssql/src/main/java/io/airbyte/integrations/destination/mssql/MSSQLDestination.java b/airbyte-integrations/connectors/destination-mssql/src/main/java/io/airbyte/integrations/destination/mssql/MSSQLDestination.java
index 7acba580fe250..bd4f667a2e381 100644
--- a/airbyte-integrations/connectors/destination-mssql/src/main/java/io/airbyte/integrations/destination/mssql/MSSQLDestination.java
+++ b/airbyte-integrations/connectors/destination-mssql/src/main/java/io/airbyte/integrations/destination/mssql/MSSQLDestination.java
@@ -7,6 +7,7 @@
 import com.fasterxml.jackson.databind.JsonNode;
 import com.google.common.collect.ImmutableMap;
 import io.airbyte.commons.json.Jsons;
+import io.airbyte.db.factory.DatabaseDriver;
 import io.airbyte.integrations.base.Destination;
 import io.airbyte.integrations.base.IntegrationRunner;
 import io.airbyte.integrations.base.ssh.SshWrappedDestination;
@@ -23,7 +24,7 @@ public class MSSQLDestination extends AbstractJdbcDestination implements Destina
 
   private static final Logger LOGGER = LoggerFactory.getLogger(MSSQLDestination.class);
 
-  public static final String DRIVER_CLASS = "com.microsoft.sqlserver.jdbc.SQLServerDriver";
+  public static final String DRIVER_CLASS = DatabaseDriver.MSSQLSERVER.getDriverClassName();
   public static final String JDBC_URL_PARAMS_KEY = "jdbc_url_params";
   public static final List<String> HOST_KEY = List.of("host");
   public static final List<String> PORT_KEY = List.of("port");
diff --git a/airbyte-integrations/connectors/destination-mssql/src/test-integration/java/io/airbyte/integrations/destination/mssql/MSSQLDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mssql/src/test-integration/java/io/airbyte/integrations/destination/mssql/MSSQLDestinationAcceptanceTest.java
index d0a2dc25eb7ba..b52d9220626f2 100644
--- a/airbyte-integrations/connectors/destination-mssql/src/test-integration/java/io/airbyte/integrations/destination/mssql/MSSQLDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-mssql/src/test-integration/java/io/airbyte/integrations/destination/mssql/MSSQLDestinationAcceptanceTest.java
@@ -5,20 +5,22 @@
 package io.airbyte.integrations.destination.mssql;
 
 import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.collect.ImmutableMap;
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.commons.string.Strings;
 import io.airbyte.db.Database;
-import io.airbyte.db.Databases;
 import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.destination.ExtendedNameTransformer;
 import io.airbyte.integrations.standardtest.destination.JdbcDestinationAcceptanceTest;
 import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator;
+import io.airbyte.db.factory.DSLContextFactory;
+import io.airbyte.db.factory.DatabaseDriver;
+import io.airbyte.test.utils.DatabaseConnectionHelper;
 import java.sql.SQLException;
 import java.util.List;
 import java.util.stream.Collectors;
+import org.jooq.DSLContext;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.BeforeAll;
 import org.testcontainers.containers.MSSQLServerContainer;
@@ -27,8 +29,6 @@ public class MSSQLDestinationAcceptanceTest extends JdbcDestinationAcceptanceTes
 
   private static MSSQLServerContainer<?> db;
   private final ExtendedNameTransformer namingResolver = new ExtendedNameTransformer();
-  private final ObjectMapper mapper = new ObjectMapper();
-  private JsonNode configWithoutDbName;
   private JsonNode config;
 
   @Override
@@ -98,8 +98,8 @@ protected List<JsonNode> retrieveNormalizedRecords(final TestDestinationEnv env,
   }
 
   private List<JsonNode> retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException {
-    return Databases.createSqlServerDatabase(db.getUsername(), db.getPassword(),
-        db.getJdbcUrl()).query(
+    final DSLContext dslContext = DatabaseConnectionHelper.createDslContext(db, null);
+    return new Database(dslContext).query(
         ctx -> {
           ctx.fetch(String.format("USE %s;", config.get("database")));
           return ctx
@@ -117,16 +117,14 @@ protected static void init() {
   }
 
   private static Database getDatabase(final JsonNode config) {
-    // todo (cgardens) - rework this abstraction so that we do not have to pass a null into the
-    // constructor. at least explicitly handle it, even if the impl doesn't change.
-    return Databases.createDatabase(
+    final DSLContext dslContext = DSLContextFactory.create(
         config.get("username").asText(),
         config.get("password").asText(),
+        DatabaseDriver.MSSQLSERVER.getDriverClassName(),
         String.format("jdbc:sqlserver://%s:%s",
            config.get("host").asText(),
-            config.get("port").asInt()),
-        "com.microsoft.sqlserver.jdbc.SQLServerDriver",
-        null);
+            config.get("port").asInt()), null);
+    return new Database(dslContext);
   }
 
   // how to interact with the mssql test container manaully.
   // 1. exec into mssql container (not the test container container)
   // 2. /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P "A_Str0ng_Required_Password"
   @Override
   protected void setup(final TestDestinationEnv testEnv) throws SQLException {
-    configWithoutDbName = getConfig(db);
+    final JsonNode configWithoutDbName = getConfig(db);
     final String dbName = Strings.addRandomSuffix("db", "_", 10);
 
     final Database database = getDatabase(configWithoutDbName);
diff --git a/airbyte-integrations/connectors/destination-mssql/src/test-integration/java/io/airbyte/integrations/destination/mssql/MSSQLDestinationAcceptanceTestSSL.java b/airbyte-integrations/connectors/destination-mssql/src/test-integration/java/io/airbyte/integrations/destination/mssql/MSSQLDestinationAcceptanceTestSSL.java
index 8717a8c7f6c7f..cf182f0e8c3a3 100644
--- a/airbyte-integrations/connectors/destination-mssql/src/test-integration/java/io/airbyte/integrations/destination/mssql/MSSQLDestinationAcceptanceTestSSL.java
+++ b/airbyte-integrations/connectors/destination-mssql/src/test-integration/java/io/airbyte/integrations/destination/mssql/MSSQLDestinationAcceptanceTestSSL.java
@@ -10,15 +10,19 @@
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.commons.string.Strings;
 import io.airbyte.db.Database;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DSLContextFactory;
+import io.airbyte.db.factory.DatabaseDriver;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.destination.ExtendedNameTransformer;
 import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest;
+import io.airbyte.test.utils.DatabaseConnectionHelper;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.stream.Collectors;
+import org.jooq.DSLContext;
+import org.jooq.SQLDialect;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.BeforeAll;
 import org.testcontainers.containers.MSSQLServerContainer;
@@ -119,8 +123,8 @@ protected List<String> resolveIdentifier(final String identifier) {
   }
 
   private List<JsonNode> retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException {
-    return Databases.createSqlServerDatabase(db.getUsername(), db.getPassword(),
-        db.getJdbcUrl()).query(
+    final DSLContext dslContext = DatabaseConnectionHelper.createDslContext(db, SQLDialect.DEFAULT);
+    return new Database(dslContext).query(
         ctx -> {
           ctx.fetch(String.format("USE %s;", config.get("database")));
           return ctx
@@ -140,19 +144,17 @@ protected static void init() {
   }
 
   private static Database getDatabase(final JsonNode config) {
-    // todo (cgardens) - rework this abstraction so that we do not have to pass a null into the
-    // constructor. at least explicitly handle it, even if the impl doesn't change.
-    return Databases.createDatabase(
+    final DSLContext dslContext = DSLContextFactory.create(
         config.get("username").asText(),
         config.get("password").asText(),
+        DatabaseDriver.MSSQLSERVER.getDriverClassName(),
         String.format("jdbc:sqlserver://%s:%s",
            config.get("host").asText(),
-            config.get("port").asInt()),
-        "com.microsoft.sqlserver.jdbc.SQLServerDriver",
-        null);
+            config.get("port").asInt()), null);
+    return new Database(dslContext);
   }
 
-  // how to interact with the mssql test container manaully.
+  // how to interact with the mssql test container manually.
   // 1. exec into mssql container (not the test container container)
   // 2. /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P "A_Str0ng_Required_Password"
   @Override
diff --git a/airbyte-integrations/connectors/destination-mssql/src/test-integration/java/io/airbyte/integrations/destination/mssql/SshMSSQLDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mssql/src/test-integration/java/io/airbyte/integrations/destination/mssql/SshMSSQLDestinationAcceptanceTest.java
index e0a1cba4bb3de..1780fed4798af 100644
--- a/airbyte-integrations/connectors/destination-mssql/src/test-integration/java/io/airbyte/integrations/destination/mssql/SshMSSQLDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-mssql/src/test-integration/java/io/airbyte/integrations/destination/mssql/SshMSSQLDestinationAcceptanceTest.java
@@ -10,7 +10,9 @@
 import io.airbyte.commons.functional.CheckedFunction;
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.db.Database;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DSLContextFactory;
+import io.airbyte.db.factory.DataSourceFactory;
+import io.airbyte.db.factory.DatabaseDriver;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.base.ssh.SshBastionContainer;
@@ -21,7 +23,9 @@
 import java.util.List;
 import java.util.Objects;
 import java.util.stream.Collectors;
+import javax.sql.DataSource;
 import org.apache.commons.lang3.RandomStringUtils;
+import org.jooq.DSLContext;
 import org.testcontainers.containers.JdbcDatabaseContainer;
 import org.testcontainers.containers.MSSQLServerContainer;
 import org.testcontainers.containers.Network;
@@ -120,14 +124,14 @@ public ImmutableMap.Builder<Object, Object> getMSSQLDbConfigBuilder(final JdbcDa
   }
 
   private static Database getDatabaseFromConfig(final JsonNode config) {
-    return Databases.createDatabase(
+    final DSLContext dslContext = DSLContextFactory.create(
         config.get("username").asText(),
         config.get("password").asText(),
+        DatabaseDriver.MSSQLSERVER.getDriverClassName(),
         String.format("jdbc:sqlserver://%s:%s",
            config.get("host").asText(),
-            config.get("port").asInt()),
-        "com.microsoft.sqlserver.jdbc.SQLServerDriver",
-        null);
+            config.get("port").asInt()), null);
+    return new Database(dslContext);
   }
 
   private List<JsonNode> retrieveRecordsFromTable(final String tableName, final String schemaName) throws Exception {
diff --git a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/build.gradle
index 6083f79111ced..a058837e5a631 100644
--- a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/build.gradle
+++ b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/build.gradle
@@ -20,7 +20,7 @@ dependencies {
 
   integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test')
   integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-mysql')
-  integrationTestJavaImplementation "org.testcontainers:mysql:1.15.3"
+  integrationTestJavaImplementation libs.testcontainers.mysql
 
   implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs)
   integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-normalization').airbyteDocker.outputs)
diff --git a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLStrictEncryptDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLStrictEncryptDestinationAcceptanceTest.java
index 0c29d20a296b8..885a56a22adad 100644
--- a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLStrictEncryptDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLStrictEncryptDestinationAcceptanceTest.java
@@ -10,7 +10,9 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 import io.airbyte.commons.json.Jsons;
-import io.airbyte.db.Databases;
+import io.airbyte.db.Database;
+import io.airbyte.db.factory.DSLContextFactory;
+import io.airbyte.db.factory.DataSourceFactory;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.destination.ExtendedNameTransformer;
@@ -27,6 +29,8 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.stream.Collectors;
+import javax.sql.DataSource;
+import org.jooq.DSLContext;
 import org.jooq.SQLDialect;
 import org.junit.jupiter.api.Test;
 import org.testcontainers.containers.MySQLContainer;
@@ -99,15 +103,15 @@ protected List<JsonNode> retrieveRecords(final TestDestinationEnv testEnv,
   }
 
   private List<JsonNode> retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException {
-    return Databases.createDatabase(
+    final DSLContext dslContext = DSLContextFactory.create(
         db.getUsername(),
         db.getPassword(),
+        db.getDriverClassName(),
         String.format("jdbc:mysql://%s:%s/%s?useSSL=true&requireSSL=true&verifyServerCertificate=false",
            db.getHost(),
            db.getFirstMappedPort(),
-            db.getDatabaseName()),
-        "com.mysql.cj.jdbc.Driver",
-        SQLDialect.MYSQL).query(
+            db.getDatabaseName()), SQLDialect.MYSQL);
+    return new Database(dslContext).query(
         ctx -> ctx
            .fetch(String.format("SELECT * FROM %s.%s ORDER BY %s ASC;", schemaName, tableName,
                JavaBaseConstants.COLUMN_NAME_EMITTED_AT))
@@ -160,15 +164,15 @@ private void grantCorrectPermissions() {
 
   private void executeQuery(final String query) {
     try {
-      Databases.createDatabase(
-          "root",
-          "test",
+      final DSLContext dslContext = DSLContextFactory.create(
+          db.getUsername(),
+          db.getPassword(),
+          db.getDriverClassName(),
          String.format("jdbc:mysql://%s:%s/%s?useSSL=true&requireSSL=true&verifyServerCertificate=false",
              db.getHost(),
              db.getFirstMappedPort(),
-              db.getDatabaseName()),
-          "com.mysql.cj.jdbc.Driver",
-          SQLDialect.MYSQL).query(
+              db.getDatabaseName()), SQLDialect.MYSQL);
+      new Database(dslContext).query(
          ctx -> ctx
              .execute(query));
    } catch (final SQLException e) {
diff --git a/airbyte-integrations/connectors/destination-mysql/build.gradle b/airbyte-integrations/connectors/destination-mysql/build.gradle
index ad7e68f4c4025..94ca343379804 100644
--- a/airbyte-integrations/connectors/destination-mysql/build.gradle
+++ b/airbyte-integrations/connectors/destination-mysql/build.gradle
@@ -19,7 +19,7 @@ dependencies {
 
   integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test')
   integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-mysql')
-  integrationTestJavaImplementation "org.testcontainers:mysql:1.15.3"
+  integrationTestJavaImplementation libs.testcontainers.mysql
 
   implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs)
   integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-normalization').airbyteDocker.outputs)
diff --git a/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLDestination.java b/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLDestination.java
index da86b862edb6d..4675fd75c539c 100644
--- a/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLDestination.java
+++ b/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLDestination.java
@@ -8,6 +8,7 @@
 import com.google.common.collect.ImmutableMap;
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.commons.map.MoreMaps;
+import io.airbyte.db.factory.DatabaseDriver;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.integrations.base.Destination;
 import io.airbyte.integrations.base.IntegrationRunner;
@@ -34,7 +35,7 @@ public class MySQLDestination extends AbstractJdbcDestination implements Destina
   public static final String SSL_KEY = "ssl";
   public static final String USERNAME_KEY = "username";
 
-  public static final String DRIVER_CLASS = "com.mysql.cj.jdbc.Driver";
+  public static final String DRIVER_CLASS = DatabaseDriver.MYSQL.getDriverClassName();
 
   static final Map<String, String> DEFAULT_JDBC_PARAMETERS = ImmutableMap.of(
       // zero dates by default cannot be parsed into java date objects (they will throw an error)
diff --git a/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLDestinationAcceptanceTest.java
index 6016ca02c3be0..9dcada9cbde58 100644
--- a/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLDestinationAcceptanceTest.java
@@ -10,7 +10,10 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 import io.airbyte.commons.json.Jsons;
-import io.airbyte.db.Databases;
+import io.airbyte.db.Database;
+import io.airbyte.db.factory.DSLContextFactory;
+import io.airbyte.db.factory.DataSourceFactory;
+import io.airbyte.db.factory.DatabaseDriver;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.destination.ExtendedNameTransformer;
@@ -27,6 +30,8 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.stream.Collectors;
+import javax.sql.DataSource;
+import org.jooq.DSLContext;
 import org.jooq.SQLDialect;
 import org.junit.jupiter.api.Test;
 import org.testcontainers.containers.MySQLContainer;
@@ -101,15 +106,15 @@ protected List<JsonNode> retrieveRecords(final TestDestinationEnv testEnv,
   }
 
   private List<JsonNode> retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException {
-    return Databases.createDatabase(
+    final DSLContext dslContext = DSLContextFactory.create(
        db.getUsername(),
        db.getPassword(),
-        String.format("jdbc:mysql://%s:%s/%s",
+        db.getDriverClassName(),
+        String.format(DatabaseDriver.MYSQL.getUrlFormatString(),
            db.getHost(),
            db.getFirstMappedPort(),
-            db.getDatabaseName()),
-        "com.mysql.cj.jdbc.Driver",
-        SQLDialect.MYSQL).query(
+            db.getDatabaseName()), SQLDialect.MYSQL);
+    return new Database(dslContext).query(
        ctx -> ctx
            .fetch(String.format("SELECT * FROM %s.%s ORDER BY %s ASC;", schemaName, tableName,
                JavaBaseConstants.COLUMN_NAME_EMITTED_AT))
@@ -162,15 +167,15 @@ private void grantCorrectPermissions() {
 
   private void executeQuery(final String query) {
     try {
-      Databases.createDatabase(
+      final DSLContext dslContext = DSLContextFactory.create(
          "root",
          "test",
-          String.format("jdbc:mysql://%s:%s/%s",
+          db.getDriverClassName(),
+          String.format(DatabaseDriver.MYSQL.getUrlFormatString(),
              db.getHost(),
              db.getFirstMappedPort(),
-              db.getDatabaseName()),
-          "com.mysql.cj.jdbc.Driver",
-          SQLDialect.MYSQL).query(
+              db.getDatabaseName()), SQLDialect.MYSQL);
+      new Database(dslContext).query(
          ctx -> ctx
              .execute(query));
    } catch (final SQLException e) {
diff --git a/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/SshMySQLDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/SshMySQLDestinationAcceptanceTest.java
index 168e017eac94d..b75aa32375604 100644
--- a/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/SshMySQLDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/SshMySQLDestinationAcceptanceTest.java
@@ -12,7 +12,8 @@
 import io.airbyte.commons.io.IOs;
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.db.Database;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DSLContextFactory;
+import io.airbyte.db.factory.DataSourceFactory;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.base.ssh.SshTunnel;
@@ -22,7 +23,10 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.stream.Collectors;
+import javax.sql.DataSource;
 import org.apache.commons.lang3.RandomStringUtils;
+import org.jooq.DSLContext;
+import org.jooq.SQLDialect;
 
 /**
  * Abstract class that allows us to avoid duplicating testing logic for testing SSH with a key file
@@ -111,12 +115,14 @@ protected List<String> resolveIdentifier(final String identifier) {
   }
 
   private static Database getDatabaseFromConfig(final JsonNode config) {
-    return Databases.createMySqlDatabase(
+    final DSLContext dslContext = DSLContextFactory.create(
        config.get("username").asText(),
        config.get("password").asText(),
+        "com.mysql.cj.jdbc.Driver",
        String.format("jdbc:mysql://%s:%s",
            config.get("host").asText(),
-            config.get("port").asText()));
+            config.get("port").asText()), SQLDialect.MYSQL);
+    return new Database(dslContext);
   }
 
   private List<JsonNode> retrieveRecordsFromTable(final String tableName, final String schemaName) throws Exception {
diff --git a/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/SslMySQLDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/SslMySQLDestinationAcceptanceTest.java
index b60ac9b2950b5..8aa616a2b1ea3 100644
--- a/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/SslMySQLDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/SslMySQLDestinationAcceptanceTest.java
@@ -7,13 +7,17 @@
 import com.fasterxml.jackson.databind.JsonNode;
 import com.google.common.collect.ImmutableMap;
 import io.airbyte.commons.json.Jsons;
-import io.airbyte.db.Databases;
+import io.airbyte.db.Database;
+import io.airbyte.db.factory.DSLContextFactory;
+import io.airbyte.db.factory.DataSourceFactory;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.destination.ExtendedNameTransformer;
 import java.sql.SQLException;
 import java.util.List;
 import java.util.stream.Collectors;
+import javax.sql.DataSource;
+import org.jooq.DSLContext;
 import org.jooq.SQLDialect;
 import org.junit.jupiter.api.Test;
 import org.testcontainers.containers.MySQLContainer;
@@ -92,15 +96,15 @@ protected void tearDown(final TestDestinationEnv testEnv) {
   }
 
   private List<JsonNode> retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException {
-    return Databases.createDatabase(
+    final DSLContext dslContext = DSLContextFactory.create(
        db.getUsername(),
        db.getPassword(),
+        db.getDriverClassName(),
        String.format("jdbc:mysql://%s:%s/%s?useSSL=true&requireSSL=true&verifyServerCertificate=false",
            db.getHost(),
            db.getFirstMappedPort(),
-            db.getDatabaseName()),
-        "com.mysql.cj.jdbc.Driver",
-        SQLDialect.MYSQL).query(
+            db.getDatabaseName()), SQLDialect.DEFAULT);
+    return new Database(dslContext).query(
        ctx -> ctx
            .fetch(String.format("SELECT * FROM %s.%s ORDER BY %s ASC;", schemaName, tableName,
                JavaBaseConstants.COLUMN_NAME_EMITTED_AT))
@@ -124,15 +128,15 @@ private void grantCorrectPermissions() {
 
   private void executeQuery(final String query) {
     try {
-      Databases.createDatabase(
+      final DSLContext dslContext = DSLContextFactory.create(
          "root",
          "test",
+          db.getDriverClassName(),
          String.format("jdbc:mysql://%s:%s/%s?useSSL=true&requireSSL=true&verifyServerCertificate=false",
              db.getHost(),
              db.getFirstMappedPort(),
-              db.getDatabaseName()),
-          "com.mysql.cj.jdbc.Driver",
-          SQLDialect.MYSQL).query(
+              db.getDatabaseName()), SQLDialect.DEFAULT);
+      new Database(dslContext).query(
          ctx -> ctx
              .execute(query));
    } catch (final SQLException e) {
diff --git a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/build.gradle
index 4d155c664295c..f7a4beb847f37 100644
--- a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/build.gradle
+++ b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/build.gradle
@@ -25,7 +25,7 @@ dependencies {
   testImplementation project(':airbyte-test-utils')
 
   testImplementation 'org.apache.commons:commons-lang3:3.11'
-  testImplementation 'org.testcontainers:oracle-xe:1.16.0'
+  testImplementation libs.testcontainers.oracle.xe
 
   integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test')
   integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-oracle')
diff --git a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java
index be74f4c06cffb..a9bcfcd40a0ed 100644
--- a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java
@@ -13,7 +13,10 @@
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.commons.string.Strings;
 import io.airbyte.db.Database;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DSLContextFactory;
+import io.airbyte.db.factory.DataSourceFactory;
+import io.airbyte.db.factory.DatabaseDriver;
+import io.airbyte.db.jdbc.DefaultJdbcDatabase;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.integrations.destination.ExtendedNameTransformer;
@@ -24,6 +27,8 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.stream.Collectors;
+import javax.sql.DataSource;
+import org.jooq.DSLContext;
 import org.jooq.JSONFormat;
 import org.junit.Test;
 
@@ -113,24 +118,30 @@ protected List<String> resolveIdentifier(final String identifier) {
   private List<JsonNode> retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException {
     final String query = String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName,
        OracleDestination.COLUMN_NAME_EMITTED_AT);
-    final List<org.jooq.Record> result = getDatabase(config).query(ctx -> ctx.fetch(query).stream().toList());
-    return result
-        .stream()
-        .map(r -> r.formatJSON(JSON_FORMAT))
-        .map(Jsons::deserialize)
-        .collect(Collectors.toList());
+
+    try (final DSLContext dslContext = getDslContext(config)) {
+      final List<org.jooq.Record> result = getDatabase(dslContext).query(ctx -> ctx.fetch(query).stream().toList());
+      return result
+          .stream()
+          .map(r -> r.formatJSON(JSON_FORMAT))
+          .map(Jsons::deserialize)
+          .collect(Collectors.toList());
+    }
+  }
+
+  private static Database getDatabase(final DSLContext dslContext) {
+    return new Database(dslContext);
   }
 
-  private static Database getDatabase(final JsonNode config) {
-    return Databases.createDatabase(
+  private static DSLContext getDslContext(final JsonNode config) {
+    return DSLContextFactory.create(
        config.get("username").asText(),
        config.get("password").asText(),
-        String.format("jdbc:oracle:thin:@//%s:%s/%s",
+        DatabaseDriver.ORACLE.getDriverClassName(),
+        String.format(DatabaseDriver.ORACLE.getUrlFormatString(),
            config.get("host").asText(),
-            config.get("port").asText(),
-            config.get("sid").asText()),
-        "oracle.jdbc.driver.OracleDriver",
-        null);
+            config.get("port").asInt(),
+            config.get("sid").asText()), null);
   }
 
   @Override
@@ -144,14 +155,14 @@ protected void setup(final TestDestinationEnv testEnv) throws Exception {
 
    config = getConfig(db);
 
-    final Database database = getDatabase(config);
-    database.query(
-        ctx -> ctx.fetch(String.format("CREATE USER %s IDENTIFIED BY %s", schemaName, schemaName)));
-    database.query(ctx -> ctx.fetch(String.format("GRANT ALL PRIVILEGES TO %s", schemaName)));
+    try (final DSLContext dslContext = getDslContext(config)) {
+      final Database database = getDatabase(dslContext);
+      database.query(
+          ctx -> ctx.fetch(String.format("CREATE USER %s IDENTIFIED BY %s", schemaName, schemaName)));
+      database.query(ctx -> ctx.fetch(String.format("GRANT ALL PRIVILEGES TO %s", schemaName)));
 
-    database.close();
-
-    ((ObjectNode) config).put("schema", dbName);
+      ((ObjectNode) config).put("schema", dbName);
+    }
   }
 
   @Override
@@ -163,18 +174,19 @@ protected void tearDown(final TestDestinationEnv testEnv) {
 
   @Test
   public void testEncryption() throws SQLException {
    final String algorithm = "AES256";
-    final JsonNode config = getConfig();
-
-    final JdbcDatabase database = Databases.createJdbcDatabase(config.get("username").asText(),
-        config.get("password").asText(),
-        String.format("jdbc:oracle:thin:@//%s:%s/%s",
-            config.get("host").asText(),
-            config.get("port").asText(),
-            config.get("sid").asText()),
-        "oracle.jdbc.driver.OracleDriver",
-        JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" +
-            "oracle.net.encryption_types_client=( " + algorithm + " )", ";"));
+    final DataSource dataSource =
+        DataSourceFactory.create(
+            config.get("username").asText(),
+            config.get("password").asText(),
+            DatabaseDriver.ORACLE.getDriverClassName(),
+            String.format(DatabaseDriver.ORACLE.getUrlFormatString(),
+                config.get("host").asText(),
+                config.get("port").asInt(),
+                config.get("sid").asText()),
+            JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" +
+                "oracle.net.encryption_types_client=( " + algorithm + " )", ";"));
+    final JdbcDatabase database = new DefaultJdbcDatabase(dataSource);
 
    final String networkServiceBanner =
        "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)";
@@ -191,15 +203,16 @@ public void testCheckProtocol() throws SQLException {
    final String algorithm = clone.get("encryption")
        .get("encryption_algorithm").asText();
 
-    final JdbcDatabase database = Databases.createJdbcDatabase(clone.get("username").asText(),
-        clone.get("password").asText(),
-        String.format("jdbc:oracle:thin:@//%s:%s/%s",
-            clone.get("host").asText(),
-            clone.get("port").asText(),
-            clone.get("sid").asText()),
-        "oracle.jdbc.driver.OracleDriver",
-        JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" +
-            "oracle.net.encryption_types_client=( " + algorithm + " )", ";"));
+    final DataSource dataSource =
+        DataSourceFactory.create(config.get("username").asText(), config.get("password").asText(),
+            DatabaseDriver.ORACLE.getDriverClassName(),
+            String.format(DatabaseDriver.ORACLE.getUrlFormatString(),
+                config.get("host").asText(),
+                config.get("port").asInt(),
+                config.get("sid").asText()),
+            JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" +
+                "oracle.net.encryption_types_client=( " + algorithm + " )", ";"));
+    final JdbcDatabase database = new DefaultJdbcDatabase(dataSource);
 
    final String networkServiceBanner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual";
    final List<JsonNode> collect = database.queryJsons(networkServiceBanner);
diff --git a/airbyte-integrations/connectors/destination-oracle/build.gradle b/airbyte-integrations/connectors/destination-oracle/build.gradle
index dd031f075ec42..007d9c9e69c37 100644
--- a/airbyte-integrations/connectors/destination-oracle/build.gradle
+++ b/airbyte-integrations/connectors/destination-oracle/build.gradle
@@ -22,7 +22,7 @@ dependencies {
   implementation "com.oracle.database.jdbc:ojdbc8-production:19.7.0.0"
 
   testImplementation 'org.apache.commons:commons-lang3:3.11'
-  testImplementation 'org.testcontainers:oracle-xe:1.16.0'
+  testImplementation libs.testcontainers.oracle.xe
 
   integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test')
   integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-oracle')
diff --git a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleDestination.java b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleDestination.java
index 8259258686d07..ce3a1c6b0a4e2 100644
--- a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleDestination.java
+++ b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleDestination.java
@@ -7,6 +7,7 @@
 import com.fasterxml.jackson.databind.JsonNode;
 import com.google.common.collect.ImmutableMap;
 import io.airbyte.commons.json.Jsons;
+import io.airbyte.db.factory.DatabaseDriver;
 import io.airbyte.integrations.base.Destination;
 import io.airbyte.integrations.base.IntegrationRunner;
 import io.airbyte.integrations.base.JavaBaseConstants;
@@ -29,7 +30,7 @@ public class OracleDestination extends AbstractJdbcDestination implements Destin
   public static final List<String> HOST_KEY = List.of("host");
   public static final List<String> PORT_KEY = List.of("port");
 
-  public static final String DRIVER_CLASS = "oracle.jdbc.OracleDriver";
+  public static final String DRIVER_CLASS = DatabaseDriver.ORACLE.getDriverClassName();
 
   public static final String COLUMN_NAME_AB_ID =
      "\"" + JavaBaseConstants.COLUMN_NAME_AB_ID.toUpperCase() + "\"";
diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/NneOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/NneOracleDestinationAcceptanceTest.java
index 6979448920699..cb8c9200873d7 100644
--- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/NneOracleDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/NneOracleDestinationAcceptanceTest.java
@@ -11,7 +11,9 @@
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.collect.ImmutableMap;
 import io.airbyte.commons.json.Jsons;
-import io.airbyte.db.Databases;
+import
io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import java.sql.SQLException; import java.util.List; @@ -30,14 +32,18 @@ public void testEncryption() throws SQLException { .put("encryption_algorithm", algorithm) .build())); - final JdbcDatabase database = Databases.createJdbcDatabase(config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:oracle:thin:@//%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - config.get("sid").asText()), - "oracle.jdbc.driver.OracleDriver", - getAdditionalProperties(algorithm)); + final JdbcDatabase database = new DefaultJdbcDatabase( + DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.ORACLE.getDriverClassName(), + String.format(DatabaseDriver.ORACLE.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("sid").asText()), + getAdditionalProperties(algorithm) + ) + ); final String networkServiceBanner = "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)"; @@ -63,14 +69,18 @@ public void testCheckProtocol() throws SQLException { final String algorithm = clone.get("encryption") .get("encryption_algorithm").asText(); - final JdbcDatabase database = Databases.createJdbcDatabase(clone.get("username").asText(), - clone.get("password").asText(), - String.format("jdbc:oracle:thin:@//%s:%s/%s", - clone.get("host").asText(), - clone.get("port").asText(), - clone.get("sid").asText()), - "oracle.jdbc.driver.OracleDriver", - getAdditionalProperties(algorithm)); + final JdbcDatabase database = new DefaultJdbcDatabase( + DataSourceFactory.create( + clone.get("username").asText(), + clone.get("password").asText(), + DatabaseDriver.ORACLE.getDriverClassName(), + String.format(DatabaseDriver.ORACLE.getUrlFormatString(), + clone.get("host").asText(), + clone.get("port").asInt(), + clone.get("sid").asText()), + getAdditionalProperties(algorithm) + ) + ); final String networkServiceBanner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual"; final List collect = database.queryJsons(networkServiceBanner); diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java index 133a44263c251..048f1d4773f8d 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java @@ -10,7 +10,8 @@ import io.airbyte.commons.functional.CheckedFunction; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.base.ssh.SshBastionContainer; @@ -22,6 +23,7 @@ import java.util.List; import java.util.Objects; import 
java.util.stream.Collectors; +import org.jooq.DSLContext; import org.testcontainers.containers.Network; public abstract class SshOracleDestinationAcceptanceTest extends DestinationAcceptanceTest { @@ -155,15 +157,13 @@ private void initAndStartJdbcContainer() { } private Database getDatabaseFromConfig(final JsonNode config) { - return Databases.createDatabase( - config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:oracle:thin:@//%s:%s/%s", + final DSLContext dslContext = DSLContextFactory.create( + config.get("username").asText(), config.get("password").asText(), DatabaseDriver.ORACLE.getDriverClassName(), + String.format(DatabaseDriver.ORACLE.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("sid").asText()), - "oracle.jdbc.driver.OracleDriver", - null); + config.get("port").asInt(), + config.get("sid").asText()), null); + return new Database(dslContext); } @Override diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java index 1ec98f5aab56e..268320d64ab0b 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java @@ -12,7 +12,10 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.destination.ExtendedNameTransformer; @@ -21,6 +24,8 @@ import java.sql.SQLException; import java.util.List; import java.util.stream.Collectors; +import javax.sql.DataSource; +import org.jooq.DSLContext; import org.junit.Test; public class UnencryptedOracleDestinationAcceptanceTest extends DestinationAcceptanceTest { @@ -130,15 +135,13 @@ private List retrieveRecordsFromTable(final String tableName, final St } private static Database getDatabase(final JsonNode config) { - return Databases.createDatabase( - config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:oracle:thin:@//%s:%s/%s", + final DSLContext dslContext = DSLContextFactory.create( + config.get("username").asText(), config.get("password").asText(), DatabaseDriver.ORACLE.getDriverClassName(), + String.format(DatabaseDriver.ORACLE.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("sid").asText()), - "oracle.jdbc.driver.OracleDriver", - null); + config.get("port").asInt(), + config.get("sid").asText()), null); + return new Database(dslContext); } @Override @@ -157,8 +160,6 @@ protected void setup(final TestDestinationEnv testEnv) throws Exception { ctx -> ctx.fetch(String.format("CREATE USER %s IDENTIFIED BY %s", schemaName, schemaName))); database.query(ctx -> ctx.fetch(String.format("GRANT ALL PRIVILEGES TO %s", schemaName))); 
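These Oracle hunks also retire the hard-coded driver strings and hand-written jdbc:oracle:thin:@//%s:%s/%s templates in favor of the DatabaseDriver enum, which carries a driver class name and a URL format string per database. A sketch of the usage; the exact template lives in the enum, and the three format arguments (host, port, SID) mirror the call sites above:

    import io.airbyte.db.factory.DatabaseDriver;

    static void oracleConnectionInfo() {
      // DatabaseDriver bundles the driver class and a URL template per database.
      // Host, port, and SID below are placeholders.
      final String driverClass = DatabaseDriver.ORACLE.getDriverClassName();
      final String jdbcUrl = String.format(
          DatabaseDriver.ORACLE.getUrlFormatString(),
          "localhost",
          1521,        // now passed as an int (hence the asText() -> asInt()
                       // changes), presumably because the shared template
                       // formats the port with %d rather than %s
          "ORCLCDB");
    }

A side effect worth noting: OracleDestination.DRIVER_CLASS previously said oracle.jdbc.OracleDriver while these tests used oracle.jdbc.driver.OracleDriver; after this change both resolve to whichever single class name the enum defines.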
- database.close(); - ((ObjectNode) config).put("schema", dbName); } @@ -172,13 +173,13 @@ protected void tearDown(final TestDestinationEnv testEnv) { public void testNoneEncryption() throws SQLException { final JsonNode config = getConfig(); - final JdbcDatabase database = Databases.createJdbcDatabase(config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:oracle:thin:@//%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - config.get("sid").asText()), - "oracle.jdbc.driver.OracleDriver"); + final DataSource dataSource = + DataSourceFactory.create(config.get("username").asText(), config.get("password").asText(), DatabaseDriver.ORACLE.getDriverClassName(), + String.format(DatabaseDriver.ORACLE.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("sid").asText())); + final JdbcDatabase database = new DefaultJdbcDatabase(dataSource); final String networkServiceBanner = "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)"; diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle index ba26bb6486fd1..cc1e21d55da95 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle @@ -18,7 +18,7 @@ dependencies { integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') - integrationTestJavaImplementation "org.testcontainers:postgresql:1.15.3" + integrationTestJavaImplementation libs.testcontainers.postgresql implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-normalization').airbyteDocker.outputs) diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/postgres/PostgresDestinationStrictEncryptAcceptanceTest.java b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/postgres/PostgresDestinationStrictEncryptAcceptanceTest.java index ce71fbf7ec97c..7861214ca8b93 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/postgres/PostgresDestinationStrictEncryptAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/postgres/PostgresDestinationStrictEncryptAcceptanceTest.java @@ -7,7 +7,9 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; -import io.airbyte.db.Databases; +import io.airbyte.db.Database; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.destination.ExtendedNameTransformer; @@ -16,6 +18,7 @@ import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; +import org.jooq.SQLDialect; import org.testcontainers.containers.PostgreSQLContainer; import org.testcontainers.utility.DockerImageName; @@ -109,8 +112,15 @@ protected List 
resolveIdentifier(final String identifier) { } private List retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException { - return Databases.createPostgresDatabase(db.getUsername(), db.getPassword(), - db.getJdbcUrl()).query( + return new Database( + DSLContextFactory.create( + db.getUsername(), + db.getPassword(), + DatabaseDriver.POSTGRESQL.getDriverClassName(), + db.getJdbcUrl(), + SQLDialect.POSTGRES + ) + ).query( ctx -> ctx .fetch(String.format("SELECT * FROM %s.%s ORDER BY %s ASC;", schemaName, tableName, JavaBaseConstants.COLUMN_NAME_EMITTED_AT)) .stream() diff --git a/airbyte-integrations/connectors/destination-postgres/build.gradle b/airbyte-integrations/connectors/destination-postgres/build.gradle index b15cb0f9fe98a..487e065d86cc5 100644 --- a/airbyte-integrations/connectors/destination-postgres/build.gradle +++ b/airbyte-integrations/connectors/destination-postgres/build.gradle @@ -17,12 +17,12 @@ dependencies { testImplementation project(':airbyte-test-utils') - testImplementation "org.testcontainers:postgresql:1.15.3" + testImplementation libs.testcontainers.postgresql integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-postgres') - integrationTestJavaImplementation "org.testcontainers:postgresql:1.15.3" + integrationTestJavaImplementation libs.testcontainers.postgresql implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-normalization').airbyteDocker.outputs) diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java index 5d1daf58e7283..0f90fad625008 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java +++ b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.Destination; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.ssh.SshWrappedDestination; @@ -22,7 +23,7 @@ public class PostgresDestination extends AbstractJdbcDestination implements Dest private static final Logger LOGGER = LoggerFactory.getLogger(PostgresDestination.class); - public static final String DRIVER_CLASS = "org.postgresql.Driver"; + public static final String DRIVER_CLASS = DatabaseDriver.POSTGRESQL.getDriverClassName(); public static final List HOST_KEY = List.of("host"); public static final List PORT_KEY = List.of("port"); public static final String DATABASE_KEY = "database"; diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/PostgresDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/PostgresDestinationAcceptanceTest.java index 00a8767eee0dd..643b8bdeac20c 
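Database.query takes a function over the jOOQ DSLContext and returns whatever that function produces, which is why the Postgres test hunks can build the Database inline and fetch in a single expression. Roughly, against the Testcontainers PostgreSQLContainer the tests hold in db (the method and table names here are illustrative):

    import io.airbyte.db.Database;
    import io.airbyte.db.factory.DSLContextFactory;
    import io.airbyte.db.factory.DatabaseDriver;
    import java.sql.SQLException;
    import org.jooq.Record;
    import org.jooq.Result;
    import org.jooq.SQLDialect;
    import org.testcontainers.containers.PostgreSQLContainer;

    static Result<Record> readAll(final PostgreSQLContainer<?> db) throws SQLException {
      // Build the jOOQ-backed Database inline and run one fetch through it;
      // query(...) returns whatever the lambda returns.
      return new Database(
          DSLContextFactory.create(
              db.getUsername(),
              db.getPassword(),
              DatabaseDriver.POSTGRESQL.getDriverClassName(),
              db.getJdbcUrl(),
              SQLDialect.POSTGRES))
          .query(ctx -> ctx.fetch("SELECT * FROM public.some_table;"));
    }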
100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/PostgresDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/PostgresDestinationAcceptanceTest.java @@ -7,7 +7,9 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; -import io.airbyte.db.Databases; +import io.airbyte.db.Database; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.standardtest.destination.JdbcDestinationAcceptanceTest; @@ -15,6 +17,7 @@ import java.sql.SQLException; import java.util.List; import java.util.stream.Collectors; +import org.jooq.SQLDialect; import org.testcontainers.containers.PostgreSQLContainer; public class PostgresDestinationAcceptanceTest extends JdbcDestinationAcceptanceTest { @@ -108,8 +111,15 @@ protected List retrieveNormalizedRecords(final TestDestinationEnv env, } private List retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException { - return Databases.createPostgresDatabase(db.getUsername(), db.getPassword(), - db.getJdbcUrl()).query(ctx -> { + return new Database( + DSLContextFactory.create( + db.getUsername(), + db.getPassword(), + DatabaseDriver.POSTGRESQL.getDriverClassName(), + db.getJdbcUrl(), + SQLDialect.POSTGRES + ) + ).query(ctx -> { ctx.execute("set time zone 'UTC';"); return ctx.fetch(String.format("SELECT * FROM %s.%s ORDER BY %s ASC;", schemaName, tableName, JavaBaseConstants.COLUMN_NAME_EMITTED_AT)) .stream() diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/SshPostgresDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/SshPostgresDestinationAcceptanceTest.java index 1f15f7b15755d..e938456b230b3 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/SshPostgresDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/SshPostgresDestinationAcceptanceTest.java @@ -9,7 +9,8 @@ import io.airbyte.commons.functional.CheckedFunction; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.base.ssh.SshBastionContainer; import io.airbyte.integrations.base.ssh.SshTunnel; @@ -19,6 +20,7 @@ import java.util.List; import java.util.stream.Collectors; import org.apache.commons.lang3.RandomStringUtils; +import org.jooq.SQLDialect; import org.testcontainers.containers.PostgreSQLContainer; // todo (cgardens) - likely some of this could be further de-duplicated with @@ -109,11 +111,18 @@ protected List retrieveNormalizedRecords(final TestDestinationEnv env, } private static Database getDatabaseFromConfig(final JsonNode config) { - return Databases.createPostgresDatabase( - config.get("username").asText(), - 
config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", config.get("host").asText(), config.get("port").asText(), - config.get("database").asText())); + return new Database( + DSLContextFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("database").asText()), + SQLDialect.POSTGRES + ) + ); } private List retrieveRecordsFromTable(final String tableName, final String schemaName) throws Exception { diff --git a/airbyte-integrations/connectors/destination-postgres/src/test/java/io/airbyte/integrations/destination/postgres/PostgresDestinationTest.java b/airbyte-integrations/connectors/destination-postgres/src/test/java/io/airbyte/integrations/destination/postgres/PostgresDestinationTest.java index db9082f1371c1..2bcece2a15ee9 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test/java/io/airbyte/integrations/destination/postgres/PostgresDestinationTest.java +++ b/airbyte-integrations/connectors/destination-postgres/src/test/java/io/airbyte/integrations/destination/postgres/PostgresDestinationTest.java @@ -148,7 +148,7 @@ void sanityTest() throws Exception { .withState(new AirbyteStateMessage().withData(Jsons.jsonNode(ImmutableMap.of(SCHEMA_NAME + "." + STREAM_NAME, 10))))); consumer.close(); - final JdbcDatabase database = PostgreSQLContainerHelper.getJdbcDatabaseFromConfig(config); + final JdbcDatabase database = PostgreSQLContainerHelper.getJdbcDatabaseFromConfig(PostgreSQLContainerHelper.getDataSourceFromConfig(config)); final List actualRecords = database.bufferedResultSetQuery( connection -> connection.createStatement().executeQuery("SELECT * FROM public._airbyte_raw_id_and_name;"), diff --git a/airbyte-integrations/connectors/destination-pulsar/build.gradle b/airbyte-integrations/connectors/destination-pulsar/build.gradle index bb261f2031814..6b5c89fdf9aae 100644 --- a/airbyte-integrations/connectors/destination-pulsar/build.gradle +++ b/airbyte-integrations/connectors/destination-pulsar/build.gradle @@ -18,7 +18,7 @@ dependencies { implementation 'org.apache.pulsar:pulsar-client:2.8.1' - testImplementation "org.testcontainers:pulsar:1.16.2" + testImplementation libs.testcontainers.pulsar integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-pulsar') diff --git a/airbyte-integrations/connectors/destination-redis/build.gradle b/airbyte-integrations/connectors/destination-redis/build.gradle index 664bf8c4611db..d06899cc8afdf 100644 --- a/airbyte-integrations/connectors/destination-redis/build.gradle +++ b/airbyte-integrations/connectors/destination-redis/build.gradle @@ -11,7 +11,6 @@ application { def redisDriver = '3.7.0' def assertVersion = '3.21.0' -def testContainersVersion = '1.16.2' dependencies { implementation project(':airbyte-config:models') @@ -29,7 +28,7 @@ dependencies { // https://mvnrepository.com/artifact/org.assertj/assertj-core testImplementation "org.assertj:assertj-core:${assertVersion}" // https://mvnrepository.com/artifact/org.testcontainers/testcontainers - testImplementation "org.testcontainers:testcontainers:${testContainersVersion}" + testImplementation libs.testcontainers integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') 
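For plain JDBC access with no jOOQ layer, the series pairs DataSourceFactory with DefaultJdbcDatabase in the same spirit; the PostgreSQLContainerHelper call site above makes the split visible by constructing the DataSource first and passing it in. A minimal sketch with placeholder values:

    import io.airbyte.db.factory.DataSourceFactory;
    import io.airbyte.db.jdbc.DefaultJdbcDatabase;
    import io.airbyte.db.jdbc.JdbcDatabase;
    import javax.sql.DataSource;

    static JdbcDatabase getJdbcDatabase() {
      // DataSource first, then the JdbcDatabase wrapper on top of it.
      // All connection values are placeholders, not values from this patch.
      final DataSource dataSource = DataSourceFactory.create(
          "user",
          "secret",
          "org.postgresql.Driver",
          "jdbc:postgresql://localhost:5432/airbyte");
      return new DefaultJdbcDatabase(dataSource);
    }

Making the DataSource an explicit object is the point of the refactor: callers can share it, instrument it, or manage its lifecycle, where the removed Databases.createJdbcDatabase hid all of that behind one call.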
integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-redis') diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java index 7dee925f655c0..576323918e64e 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java @@ -7,7 +7,9 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination; import io.airbyte.integrations.destination.redshift.enums.RedshiftDataTmpTableMode; @@ -16,7 +18,7 @@ public class RedshiftInsertDestination extends AbstractJdbcDestination { - private static final String DRIVER_CLASS = "com.amazon.redshift.jdbc.Driver"; + private static final String DRIVER_CLASS = DatabaseDriver.REDSHIFT.getDriverClassName(); private static final String USERNAME = "username"; private static final String PASSWORD = "password"; private static final String SCHEMA = "schema"; @@ -47,12 +49,14 @@ protected Map getDefaultConnectionProperties(final JsonNode conf public static JdbcDatabase getJdbcDatabase(final JsonNode config) { final var jdbcConfig = RedshiftInsertDestination.getJdbcConfig(config); - return Databases.createJdbcDatabase( + return new DefaultJdbcDatabase( + DataSourceFactory.create( jdbcConfig.get(USERNAME).asText(), jdbcConfig.has(PASSWORD) ? 
jdbcConfig.get(PASSWORD).asText() : null, - jdbcConfig.get(JDBC_URL).asText(), RedshiftInsertDestination.DRIVER_CLASS, - SSL_JDBC_PARAMETERS); + jdbcConfig.get(JDBC_URL).asText(), + SSL_JDBC_PARAMETERS) + ); } public static JsonNode getJdbcConfig(final JsonNode redshiftConfig) { diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java index c0187c793fe37..bd5ea0c3d2567 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java @@ -11,7 +11,8 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.standardtest.destination.JdbcDestinationAcceptanceTest; import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; @@ -148,15 +149,19 @@ protected void tearDown(final TestDestinationEnv testEnv) throws Exception { } protected Database getDatabase() { - return Databases.createDatabase( - baseConfig.get("username").asText(), - baseConfig.get("password").asText(), - String.format("jdbc:redshift://%s:%s/%s", - baseConfig.get("host").asText(), - baseConfig.get("port").asText(), - baseConfig.get("database").asText()), - "com.amazon.redshift.jdbc.Driver", null, - RedshiftInsertDestination.SSL_JDBC_PARAMETERS); + return new Database( + DSLContextFactory.create( + baseConfig.get("username").asText(), + baseConfig.get("password").asText(), + DatabaseDriver.REDSHIFT.getDriverClassName(), + String.format(DatabaseDriver.REDSHIFT.getUrlFormatString(), + baseConfig.get("host").asText(), + baseConfig.get("port").asInt(), + baseConfig.get("database").asText()), + null, + RedshiftInsertDestination.SSL_JDBC_PARAMETERS + ) + ); } public RedshiftSQLNameTransformer getNamingResolver() { diff --git a/airbyte-integrations/connectors/destination-s3/build.gradle b/airbyte-integrations/connectors/destination-s3/build.gradle index 9882a51d6cd7d..0ffaf055f2730 100644 --- a/airbyte-integrations/connectors/destination-s3/build.gradle +++ b/airbyte-integrations/connectors/destination-s3/build.gradle @@ -28,6 +28,7 @@ dependencies { implementation group: 'com.github.airbytehq', name: 'json-avro-converter', version: '1.0.1' testImplementation 'org.apache.commons:commons-lang3:3.11' + testImplementation 'org.xerial.snappy:snappy-java:1.1.8.4' testImplementation "org.mockito:mockito-inline:4.1.0" integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') diff --git a/airbyte-integrations/connectors/destination-scylla/build.gradle b/airbyte-integrations/connectors/destination-scylla/build.gradle index e1a9d6aab8d87..0fceee3916266 100644 --- a/airbyte-integrations/connectors/destination-scylla/build.gradle +++ b/airbyte-integrations/connectors/destination-scylla/build.gradle @@ -11,7 +11,6 @@ application { def scyllaDriver = 
'3.10.2-scylla-1' def assertVersion = '3.21.0' -def testContainersVersion = '1.16.2' dependencies { implementation project(':airbyte-config:models') @@ -24,7 +23,7 @@ dependencies { // https://mvnrepository.com/artifact/org.assertj/assertj-core testImplementation "org.assertj:assertj-core:${assertVersion}" // https://mvnrepository.com/artifact/org.testcontainers/testcontainers - testImplementation "org.testcontainers:testcontainers:${testContainersVersion}" + testImplementation libs.testcontainers diff --git a/airbyte-integrations/connectors/source-bigquery/src/main/java/io/airbyte/integrations/source/bigquery/BigQuerySource.java b/airbyte-integrations/connectors/source-bigquery/src/main/java/io/airbyte/integrations/source/bigquery/BigQuerySource.java index c1687551b92dc..9507b5ae3f5f3 100644 --- a/airbyte-integrations/connectors/source-bigquery/src/main/java/io/airbyte/integrations/source/bigquery/BigQuerySource.java +++ b/airbyte-integrations/connectors/source-bigquery/src/main/java/io/airbyte/integrations/source/bigquery/BigQuerySource.java @@ -13,7 +13,6 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; -import io.airbyte.db.Databases; import io.airbyte.db.SqlDatabase; import io.airbyte.db.bigquery.BigQueryDatabase; import io.airbyte.db.bigquery.BigQuerySourceOperations; @@ -60,7 +59,7 @@ public JsonNode toDatabaseConfig(final JsonNode config) { @Override protected BigQueryDatabase createDatabase(final JsonNode config) { dbConfig = Jsons.clone(config); - return Databases.createBigQueryDatabase(config.get(CONFIG_PROJECT_ID).asText(), config.get(CONFIG_CREDS).asText()); + return new BigQueryDatabase(config.get(CONFIG_PROJECT_ID).asText(), config.get(CONFIG_CREDS).asText()); } @Override diff --git a/airbyte-integrations/connectors/source-clickhouse-strict-encrypt/build.gradle b/airbyte-integrations/connectors/source-clickhouse-strict-encrypt/build.gradle index 1d9cf41852f91..3d492c69775e6 100644 --- a/airbyte-integrations/connectors/source-clickhouse-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/source-clickhouse-strict-encrypt/build.gradle @@ -24,5 +24,5 @@ dependencies { integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-clickhouse') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-clickhouse-strict-encrypt') integrationTestJavaImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) - integrationTestJavaImplementation "org.testcontainers:clickhouse:1.16.0" + integrationTestJavaImplementation libs.testcontainers.clickhouse } diff --git a/airbyte-integrations/connectors/source-clickhouse-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/ClickHouseStrictEncryptJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-clickhouse-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/ClickHouseStrictEncryptJdbcSourceAcceptanceTest.java index f812ed9c079a5..3e17118b54653 100644 --- a/airbyte-integrations/connectors/source-clickhouse-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/ClickHouseStrictEncryptJdbcSourceAcceptanceTest.java +++ 
b/airbyte-integrations/connectors/source-clickhouse-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/ClickHouseStrictEncryptJdbcSourceAcceptanceTest.java @@ -12,7 +12,8 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; import io.airbyte.commons.string.Strings; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.base.ssh.SshHelpers; @@ -79,13 +80,16 @@ public void setup() throws Exception { .put("password", "") .build()); - db = Databases.createJdbcDatabase( - configWithoutDbName.get("username").asText(), - configWithoutDbName.get("password").asText(), - String.format("jdbc:clickhouse://%s:%s?ssl=true&sslmode=none", - configWithoutDbName.get("host").asText(), - configWithoutDbName.get("port").asText()), - ClickHouseSource.DRIVER_CLASS); + db = new DefaultJdbcDatabase( + DataSourceFactory.create( + configWithoutDbName.get("username").asText(), + configWithoutDbName.get("password").asText(), + ClickHouseSource.DRIVER_CLASS, + String.format("jdbc:clickhouse://%s:%s?ssl=true&sslmode=none", + configWithoutDbName.get("host").asText(), + configWithoutDbName.get("port").asText()) + ) + ); dbName = Strings.addRandomSuffix("db", "_", 10).toLowerCase(); diff --git a/airbyte-integrations/connectors/source-clickhouse/build.gradle b/airbyte-integrations/connectors/source-clickhouse/build.gradle index d76ebb9a08841..792089a2874d3 100644 --- a/airbyte-integrations/connectors/source-clickhouse/build.gradle +++ b/airbyte-integrations/connectors/source-clickhouse/build.gradle @@ -22,5 +22,5 @@ dependencies { integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-clickhouse') integrationTestJavaImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) - integrationTestJavaImplementation "org.testcontainers:clickhouse:1.16.0" + integrationTestJavaImplementation libs.testcontainers.clickhouse } diff --git a/airbyte-integrations/connectors/source-clickhouse/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshClickHouseSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshClickHouseSourceAcceptanceTest.java index 64e8bd5eac6ff..cc32d0c756e3d 100644 --- a/airbyte-integrations/connectors/source-clickhouse/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshClickHouseSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshClickHouseSourceAcceptanceTest.java @@ -7,7 +7,9 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.base.ssh.SshBastionContainer; @@ -100,14 +102,17 @@ private void 
initAndStartJdbcContainer() { } private static void populateDatabaseTestData() throws Exception { - final JdbcDatabase database = Databases.createJdbcDatabase( - config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:clickhouse://%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - ClickHouseSource.DRIVER_CLASS); + final JdbcDatabase database = new DefaultJdbcDatabase( + DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + ClickHouseSource.DRIVER_CLASS, + String.format(DatabaseDriver.CLICKHOUSE.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("database").asText()) + ) + ); final String table1 = JdbcUtils.getFullyQualifiedTableName(SCHEMA_NAME, STREAM_NAME); final String createTable1 = diff --git a/airbyte-integrations/connectors/source-clickhouse/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/ClickHouseSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/ClickHouseSourceAcceptanceTest.java index 5c8b8a51ce4ad..fcc1c1cc28da5 100644 --- a/airbyte-integrations/connectors/source-clickhouse/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/ClickHouseSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/ClickHouseSourceAcceptanceTest.java @@ -8,7 +8,9 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.base.ssh.SshHelpers; @@ -91,14 +93,17 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc .put("ssl", false) .build()); - final JdbcDatabase database = Databases.createJdbcDatabase( - config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:clickhouse://%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - ClickHouseSource.DRIVER_CLASS); + final JdbcDatabase database = new DefaultJdbcDatabase( + DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + ClickHouseSource.DRIVER_CLASS, + String.format(DatabaseDriver.CLICKHOUSE.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("database").asText()) + ) + ); final String table1 = JdbcUtils.getFullyQualifiedTableName(SCHEMA_NAME, STREAM_NAME); final String createTable1 = diff --git a/airbyte-integrations/connectors/source-clickhouse/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SslClickHouseJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SslClickHouseJdbcSourceAcceptanceTest.java index bff60af47f9dc..ed6c07a6e2100 100644 --- 
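The ClickHouse hunks show the same URL-template mechanism for databases whose JDBC URL ends in a database name rather than an Oracle-style SID; judging from the call sites above, the CLICKHOUSE format string takes host, port, and database. A sketch with placeholder connection values (the driver class comes from the ClickHouseSource.DRIVER_CLASS constant the hunks reference):

    import io.airbyte.db.factory.DataSourceFactory;
    import io.airbyte.db.factory.DatabaseDriver;
    import io.airbyte.db.jdbc.DefaultJdbcDatabase;
    import io.airbyte.db.jdbc.JdbcDatabase;
    import io.airbyte.integrations.source.clickhouse.ClickHouseSource;

    static JdbcDatabase getClickHouseDatabase() {
      // Same DataSource-first pattern, ClickHouse flavor:
      // host, port (as an int), then the database name.
      return new DefaultJdbcDatabase(
          DataSourceFactory.create(
              "default",   // placeholder username
              "",          // placeholder password
              ClickHouseSource.DRIVER_CLASS,
              String.format(DatabaseDriver.CLICKHOUSE.getUrlFormatString(),
                  "localhost", 8123, "default")));
    }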
a/airbyte-integrations/connectors/source-clickhouse/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SslClickHouseJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SslClickHouseJdbcSourceAcceptanceTest.java @@ -9,7 +9,8 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.source.clickhouse.ClickHouseSource; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; @@ -71,13 +72,16 @@ public void setup() throws Exception { .put("password", "") .build()); - db = Databases.createJdbcDatabase( - configWithoutDbName.get("username").asText(), - configWithoutDbName.get("password").asText(), - String.format("jdbc:clickhouse://%s:%s?ssl=true&sslmode=none", - configWithoutDbName.get("host").asText(), - configWithoutDbName.get("port").asText()), - ClickHouseSource.DRIVER_CLASS); + db = new DefaultJdbcDatabase( + DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + ClickHouseSource.DRIVER_CLASS, + String.format("jdbc:clickhouse://%s:%d?ssl=true&sslmode=none", + config.get("host").asText(), + config.get("port").asInt()) + ) + ); dbName = Strings.addRandomSuffix("db", "_", 10).toLowerCase(); diff --git a/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/build.gradle b/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/build.gradle index bcb91e64cb7bd..fddeae3b249a8 100644 --- a/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/build.gradle @@ -17,10 +17,10 @@ dependencies { implementation project(':airbyte-integrations:connectors:source-relational-db') implementation project(':airbyte-integrations:connectors:source-cockroachdb') - implementation "org.testcontainers:testcontainers:1.15.3" - implementation "org.testcontainers:jdbc:1.15.3" - implementation "org.testcontainers:cockroachdb:1.15.3" - implementation "org.postgresql:postgresql:42.3.1" + implementation libs.testcontainers + implementation libs.testcontainers.jdbc + implementation libs.testcontainers.cockroachdb + implementation libs.postgresql integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-cockroachdb') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-cockroachdb-strict-encrypt') diff --git a/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/cockroachdb/CockroachDbEncryptSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/cockroachdb/CockroachDbEncryptSourceAcceptanceTest.java index 87428ef83c71d..8e2cc86268017 100644 --- a/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/cockroachdb/CockroachDbEncryptSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/cockroachdb/CockroachDbEncryptSourceAcceptanceTest.java @@ -10,7 +10,9 @@ 
import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.ssh.SshHelpers; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; @@ -23,6 +25,8 @@ import io.airbyte.protocol.models.JsonSchemaType; import io.airbyte.protocol.models.SyncMode; import java.util.HashMap; +import javax.sql.DataSource; +import org.jooq.DSLContext; import org.jooq.SQLDialect; public class CockroachDbEncryptSourceAcceptanceTest extends SourceAcceptanceTest { @@ -46,27 +50,27 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc .put("password", "test_user") .build()); - final Database database = Databases.createDatabase( + try (final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver", - SQLDialect.POSTGRES); - - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - ctx.fetch( - "INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); - ctx.fetch( - "INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - return null; - }); - - database.close(); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.POSTGRES)){ + final Database database = new Database(dslContext); + + database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.fetch( + "INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); + ctx.fetch( + "INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + return null; + }); + } + } @Override diff --git a/airbyte-integrations/connectors/source-cockroachdb/build.gradle b/airbyte-integrations/connectors/source-cockroachdb/build.gradle index bcb93db191ea9..93e7d47c3bc7a 100644 --- a/airbyte-integrations/connectors/source-cockroachdb/build.gradle +++ b/airbyte-integrations/connectors/source-cockroachdb/build.gradle @@ -17,11 +17,11 @@ dependencies { implementation project(':airbyte-integrations:connectors:source-relational-db') implementation 'org.apache.commons:commons-lang3:3.11' - implementation "org.postgresql:postgresql:42.3.1" + implementation libs.postgresql testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) - testImplementation "org.testcontainers:cockroachdb:1.15.3" + testImplementation libs.testcontainers.cockroachdb testImplementation 'org.apache.commons:commons-lang3:3.11' integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-cockroachdb') diff --git a/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSource.java 
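Beyond swapping factories, the CockroachDB hunks change the lifecycle: the explicit database.close() at the end of setup (easy to skip on an exception path) becomes a try-with-resources block, which works because the DSLContext the factory returns is AutoCloseable and closing it releases the underlying connections. Sketched in isolation, with placeholder values:

    import io.airbyte.db.Database;
    import io.airbyte.db.factory.DSLContextFactory;
    import io.airbyte.db.factory.DatabaseDriver;
    import java.sql.SQLException;
    import org.jooq.DSLContext;
    import org.jooq.SQLDialect;

    static void seedTestData() throws SQLException {
      // try-with-resources releases the context even if a statement throws,
      // replacing the manual database.close() the old code relied on.
      try (final DSLContext dslContext = DSLContextFactory.create(
          "user",                                        // placeholder username
          "secret",                                      // placeholder password
          DatabaseDriver.POSTGRESQL.getDriverClassName(),
          String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(),
              "localhost", 26257, "defaultdb"),          // placeholder host/port/db
          SQLDialect.POSTGRES)) {
        new Database(dslContext).query(ctx ->
            ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"));
      }
    }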
b/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSource.java index 571091d404672..620d2dca347e4 100644 --- a/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSource.java +++ b/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSource.java @@ -9,7 +9,9 @@ import io.airbyte.commons.functional.CheckedFunction; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.AutoCloseableIterator; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; @@ -37,7 +39,7 @@ public class CockroachDbSource extends AbstractJdbcSource { private static final Logger LOGGER = LoggerFactory.getLogger(CockroachDbSource.class); - static final String DRIVER_CLASS = "org.postgresql.Driver"; + static final String DRIVER_CLASS = DatabaseDriver.POSTGRESQL.getDriverClassName(); public static final List HOST_KEY = List.of("host"); public static final List PORT_KEY = List.of("port"); @@ -116,13 +118,16 @@ protected boolean isNotInternalSchema(final JsonNode jsonNode, final Set public JdbcDatabase createDatabase(final JsonNode config) throws SQLException { final JsonNode jdbcConfig = toDatabaseConfig(config); - final JdbcDatabase database = Databases.createJdbcDatabase( - jdbcConfig.get("username").asText(), - jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null, - jdbcConfig.get("jdbc_url").asText(), - driverClass, - JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties"), - sourceOperations); + final JdbcDatabase database = new DefaultJdbcDatabase( + DataSourceFactory.create( + jdbcConfig.get("username").asText(), + jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null, + driverClass, + jdbcConfig.get("jdbc_url").asText(), + JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties") + ), + sourceOperations + ); quoteString = (quoteString == null ? 
database.getMetaData().getIdentifierQuoteString() : quoteString); diff --git a/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSourceAcceptanceTest.java index 472fae1798fe0..9f230810d166f 100644 --- a/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSourceAcceptanceTest.java @@ -10,7 +10,9 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; import io.airbyte.protocol.models.CatalogHelpers; @@ -22,6 +24,8 @@ import io.airbyte.protocol.models.JsonSchemaType; import io.airbyte.protocol.models.SyncMode; import java.util.HashMap; +import javax.sql.DataSource; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.testcontainers.containers.CockroachContainer; @@ -48,27 +52,26 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc .put("ssl", false) .build()); - final Database database = Databases.createDatabase( + try (final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver", - SQLDialect.POSTGRES); - - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - ctx.fetch( - "INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); - ctx.fetch( - "INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - return null; - }); - - database.close(); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.POSTGRES)) { + final Database database = new Database(dslContext); + + database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.fetch( + "INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); + ctx.fetch( + "INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + return null; + }); + } } @Override diff --git a/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSourceDatatypeTest.java b/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSourceDatatypeTest.java index 57abee37cb843..497444a640dfd 100644 --- 
a/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSourceDatatypeTest.java @@ -8,12 +8,16 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.standardtest.source.AbstractSourceDatabaseTypeTest; import io.airbyte.integrations.standardtest.source.TestDataHolder; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; import io.airbyte.protocol.models.JsonSchemaType; import java.sql.SQLException; +import javax.sql.DataSource; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -42,15 +46,15 @@ protected Database setupDatabase() throws SQLException { .build()); LOGGER.warn("PPP:config:" + config); - final Database database = Databases.createDatabase( + final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver", - SQLDialect.POSTGRES); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.POSTGRES); + final Database database = new Database(dslContext); database.query(ctx -> ctx.fetch("CREATE SCHEMA TEST;")); database.query(ctx -> ctx.fetch("CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy');")); diff --git a/airbyte-integrations/connectors/source-cockroachdb/src/test/java/io/airbyte/integrations/source/cockroachdb/CockroachDbJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-cockroachdb/src/test/java/io/airbyte/integrations/source/cockroachdb/CockroachDbJdbcSourceAcceptanceTest.java index 5cc4d38901dc8..a787f284aca69 100644 --- a/airbyte-integrations/connectors/source-cockroachdb/src/test/java/io/airbyte/integrations/source/cockroachdb/CockroachDbJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-cockroachdb/src/test/java/io/airbyte/integrations/source/cockroachdb/CockroachDbJdbcSourceAcceptanceTest.java @@ -15,7 +15,8 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; import io.airbyte.commons.util.MoreIterators; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; import io.airbyte.integrations.source.jdbc.test.JdbcSourceAcceptanceTest; @@ -81,13 +82,15 @@ public void setup() throws Exception { .build()); final JsonNode jdbcConfig = getToDatabaseConfigFunction().apply(config); - database = Databases.createJdbcDatabase( - jdbcConfig.get("username").asText(), - jdbcConfig.has("password") ? 
jdbcConfig.get("password").asText() : null, - jdbcConfig.get("jdbc_url").asText(), - getDriverClass(), - JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties")); - + database = new DefaultJdbcDatabase( + DataSourceFactory.create( + jdbcConfig.get("username").asText(), + jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null, + getDriverClass(), + jdbcConfig.get("jdbc_url").asText(), + JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties") + ) + ); database.execute(connection -> connection.createStatement().execute("CREATE DATABASE " + config.get("database") + ";")); super.setup(); } diff --git a/airbyte-integrations/connectors/source-cockroachdb/src/test/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSourceTest.java b/airbyte-integrations/connectors/source-cockroachdb/src/test/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSourceTest.java index 2099b5c4d27d7..e6290696e03b3 100644 --- a/airbyte-integrations/connectors/source-cockroachdb/src/test/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSourceTest.java +++ b/airbyte-integrations/connectors/source-cockroachdb/src/test/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSourceTest.java @@ -15,7 +15,9 @@ import io.airbyte.commons.string.Strings; import io.airbyte.commons.util.MoreIterators; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; @@ -33,6 +35,8 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import javax.sql.DataSource; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -102,39 +106,43 @@ void setup() throws Exception { dbName = Strings.addRandomSuffix("db", "_", 10).toLowerCase(); final JsonNode config = getConfig(PSQL_DB, dbName); - final Database database = getDatabaseFromConfig(config); - database.query(ctx -> { - ctx.fetch("CREATE DATABASE " + dbName + ";"); - ctx.fetch( - "CREATE TABLE id_and_name(id NUMERIC(20, 10), name VARCHAR(200), power double precision, PRIMARY KEY (id));"); - ctx.fetch("CREATE INDEX i1 ON id_and_name (id);"); - ctx.fetch( - "INSERT INTO id_and_name (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); - - ctx.fetch( - "CREATE TABLE id_and_name2(id NUMERIC(20, 10), name VARCHAR(200), power double precision);"); - ctx.fetch( - "INSERT INTO id_and_name2 (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); - - ctx.fetch( - "CREATE TABLE names(first_name VARCHAR(200), last_name VARCHAR(200), power double precision, PRIMARY KEY (first_name, last_name));"); - ctx.fetch( - "INSERT INTO names (first_name, last_name, power) VALUES ('san', 'goku', 'Infinity'), ('prince', 'vegeta', 9000.1), ('piccolo', 'junior', '-Infinity');"); - return null; - }); - database.close(); + try (final DSLContext dslContext = getDslContext(config)) { + final Database database = getDatabase(dslContext); + database.query(ctx -> { + ctx.fetch("CREATE DATABASE " + dbName + ";"); + ctx.fetch( + "CREATE TABLE id_and_name(id NUMERIC(20, 10), name VARCHAR(200), power double precision, PRIMARY KEY (id));"); + 
ctx.fetch("CREATE INDEX i1 ON id_and_name (id);"); + ctx.fetch( + "INSERT INTO id_and_name (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); + + ctx.fetch( + "CREATE TABLE id_and_name2(id NUMERIC(20, 10), name VARCHAR(200), power double precision);"); + ctx.fetch( + "INSERT INTO id_and_name2 (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); + + ctx.fetch( + "CREATE TABLE names(first_name VARCHAR(200), last_name VARCHAR(200), power double precision, PRIMARY KEY (first_name, last_name));"); + ctx.fetch( + "INSERT INTO names (first_name, last_name, power) VALUES ('san', 'goku', 'Infinity'), ('prince', 'vegeta', 9000.1), ('piccolo', 'junior', '-Infinity');"); + return null; + }); + } + } + + private static Database getDatabase(final DSLContext dslContext) { + return new Database(dslContext); } - private static Database getDatabaseFromConfig(final JsonNode config) { - return Databases.createDatabase( + private static DSLContext getDslContext(final JsonNode config) { + return DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver", - SQLDialect.POSTGRES); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.POSTGRES); } private JsonNode getConfig(final CockroachContainer psqlDb, final String dbName) { @@ -169,7 +177,8 @@ public void testCanReadUtf8() throws Exception { // .withCommand("postgres -c client_encoding=sql_ascii") db.start(); final JsonNode config = getConfig(db); - try (final Database database = getDatabaseFromConfig(config)) { + try (final DSLContext dslContext = getDslContext(config)) { + final Database database = getDatabase(dslContext); database.query(ctx -> { ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); ctx.fetch( @@ -208,26 +217,28 @@ void testDiscoverWithPk() throws Exception { @Test void testDiscoverWithPermissions() throws Exception { - JsonNode config = getConfig(PSQL_DB, dbName); - final Database database = getDatabaseFromConfig(config); - database.query(ctx -> { - ctx.fetch( - "CREATE USER cock;"); - ctx.fetch( - "CREATE TABLE id_and_name_perm1(id NUMERIC(20, 10), name VARCHAR(200), power double precision, PRIMARY KEY (id));"); - ctx.fetch( - "CREATE TABLE id_and_name_perm2(id NUMERIC(20, 10), name VARCHAR(200), power double precision, PRIMARY KEY (id));"); - ctx.fetch( - "CREATE TABLE id_and_name_perm3(id NUMERIC(20, 10), name VARCHAR(200), power double precision, PRIMARY KEY (id));"); - ctx.fetch("grant all on database " + dbName + " to cock;"); - ctx.fetch("grant all on table " + dbName + ".public.id_and_name_perm1 to cock;"); - ctx.fetch("grant select on table " + dbName + ".public.id_and_name_perm2 to cock;"); - return null; - }); + final JsonNode config = getConfig(PSQL_DB, dbName); + try (final DSLContext dslContext = getDslContext(config)) { + final Database database = getDatabase(dslContext); + database.query(ctx -> { + ctx.fetch( + "CREATE USER cock;"); + ctx.fetch( + "CREATE TABLE id_and_name_perm1(id NUMERIC(20, 10), name VARCHAR(200), power double precision, PRIMARY KEY (id));"); + ctx.fetch( + "CREATE TABLE id_and_name_perm2(id NUMERIC(20, 10), name VARCHAR(200), power double 
precision, PRIMARY KEY (id));"); + ctx.fetch( + "CREATE TABLE id_and_name_perm3(id NUMERIC(20, 10), name VARCHAR(200), power double precision, PRIMARY KEY (id));"); + ctx.fetch("grant all on database " + dbName + " to cock;"); + ctx.fetch("grant all on table " + dbName + ".public.id_and_name_perm1 to cock;"); + ctx.fetch("grant select on table " + dbName + ".public.id_and_name_perm2 to cock;"); + return null; + }); + } - List expected = List.of("id_and_name_perm1", "id_and_name_perm2"); + final List expected = List.of("id_and_name_perm1", "id_and_name_perm2"); - AirbyteCatalog airbyteCatalog = new CockroachDbSource().discover(getConfig(PSQL_DB, dbName, "cock")); + final AirbyteCatalog airbyteCatalog = new CockroachDbSource().discover(getConfig(PSQL_DB, dbName, "cock")); final List actualNamesWithPermission = airbyteCatalog .getStreams() diff --git a/airbyte-integrations/connectors/source-db2-strict-encrypt/build.gradle b/airbyte-integrations/connectors/source-db2-strict-encrypt/build.gradle index a1fbedab5eb4b..883d5acaff7c2 100644 --- a/airbyte-integrations/connectors/source-db2-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/source-db2-strict-encrypt/build.gradle @@ -22,7 +22,7 @@ dependencies { testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation project(':airbyte-test-utils') - testImplementation "org.testcontainers:db2:1.15.3" + testImplementation libs.testcontainers.db2 integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-db2') diff --git a/airbyte-integrations/connectors/source-db2-strict-encrypt/src/main/java/io/airbyte/integrations/source/db2_strict_encrypt/Db2StrictEncryptSource.java b/airbyte-integrations/connectors/source-db2-strict-encrypt/src/main/java/io/airbyte/integrations/source/db2_strict_encrypt/Db2StrictEncryptSource.java index e90d751cf02ef..c50258a6f38d2 100644 --- a/airbyte-integrations/connectors/source-db2-strict-encrypt/src/main/java/io/airbyte/integrations/source/db2_strict_encrypt/Db2StrictEncryptSource.java +++ b/airbyte-integrations/connectors/source-db2-strict-encrypt/src/main/java/io/airbyte/integrations/source/db2_strict_encrypt/Db2StrictEncryptSource.java @@ -6,6 +6,7 @@ import com.fasterxml.jackson.databind.node.ArrayNode; import io.airbyte.commons.json.Jsons; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.base.spec_modification.SpecModifyingSource; @@ -17,7 +18,7 @@ public class Db2StrictEncryptSource extends SpecModifyingSource implements Source { private static final Logger LOGGER = LoggerFactory.getLogger(Db2StrictEncryptSource.class); - public static final String DRIVER_CLASS = "com.ibm.db2.jcc.DB2Driver"; + public static final String DRIVER_CLASS = Db2Source.DRIVER_CLASS; public Db2StrictEncryptSource() { super(new Db2Source()); diff --git a/airbyte-integrations/connectors/source-db2-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2StrictEncryptSourceCertificateAcceptanceTest.java b/airbyte-integrations/connectors/source-db2-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2StrictEncryptSourceCertificateAcceptanceTest.java index a21c00c2f0100..50b56d3e53d45 100644 --- 
a/airbyte-integrations/connectors/source-db2-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2StrictEncryptSourceCertificateAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-db2-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2StrictEncryptSourceCertificateAcceptanceTest.java @@ -9,7 +9,8 @@ import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.source.db2.Db2Source; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; @@ -120,11 +121,14 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc db.getMappedPort(50000), config.get("db").asText()) + SSL_CONFIG; - database = Databases.createJdbcDatabase( - config.get("username").asText(), - config.get("password").asText(), - jdbcUrl, - Db2Source.DRIVER_CLASS); + database = new DefaultJdbcDatabase( + DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + Db2Source.DRIVER_CLASS, + jdbcUrl + ) + ); final String createSchemaQuery = String.format("CREATE SCHEMA %s", SCHEMA_NAME); final String createTableQuery1 = String diff --git a/airbyte-integrations/connectors/source-db2/build.gradle b/airbyte-integrations/connectors/source-db2/build.gradle index d5e33af7ac733..aae32ccedf779 100644 --- a/airbyte-integrations/connectors/source-db2/build.gradle +++ b/airbyte-integrations/connectors/source-db2/build.gradle @@ -21,7 +21,7 @@ dependencies { testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation project(':airbyte-test-utils') - testImplementation "org.testcontainers:db2:1.15.3" + testImplementation libs.testcontainers.db2 integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-db2') diff --git a/airbyte-integrations/connectors/source-db2/src/main/java/io.airbyte.integrations.source.db2/Db2Source.java b/airbyte-integrations/connectors/source-db2/src/main/java/io.airbyte.integrations.source.db2/Db2Source.java index d817b10f6acfd..9264b08b5b25f 100644 --- a/airbyte-integrations/connectors/source-db2/src/main/java/io.airbyte.integrations.source.db2/Db2Source.java +++ b/airbyte-integrations/connectors/source-db2/src/main/java/io.airbyte.integrations.source.db2/Db2Source.java @@ -8,6 +8,7 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.functional.CheckedFunction; import io.airbyte.commons.json.Jsons; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.base.IntegrationRunner; @@ -34,7 +35,7 @@ public class Db2Source extends AbstractJdbcSource implements Source { private static final Logger LOGGER = LoggerFactory.getLogger(Db2Source.class); - public static final String DRIVER_CLASS = "com.ibm.db2.jcc.DB2Driver"; + public static final String DRIVER_CLASS = DatabaseDriver.DB2.getDriverClassName(); public static final String USERNAME = "username"; public static final String PASSWORD = "password"; @@ -54,9 +55,9 @@ public static void main(final String[] 
args) throws Exception { @Override public JsonNode toDatabaseConfig(final JsonNode config) { - final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:db2://%s:%s/%s", + final StringBuilder jdbcUrl = new StringBuilder(String.format(DatabaseDriver.DB2.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), + config.get("port").asInt(), config.get("db").asText())); var result = Jsons.jsonNode(ImmutableMap.builder() diff --git a/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceAcceptanceTest.java b/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceAcceptanceTest.java index d51fe0242c02a..9ba59ff17fa0c 100644 --- a/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceAcceptanceTest.java @@ -11,7 +11,9 @@ import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.source.db2.Db2Source; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; @@ -62,7 +64,7 @@ protected JsonNode getConfig() { return config; } - private JsonNode getConfig(String userName, String password) { + private JsonNode getConfig(final String userName, final String password) { return Jsons.jsonNode(ImmutableMap.builder() .put("host", db.getHost()) .put("port", db.getFirstMappedPort()) @@ -111,14 +113,17 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc config = getConfig(db.getUsername(), db.getPassword()); - database = Databases.createJdbcDatabase( - config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:db2://%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - config.get("db").asText()), - Db2Source.DRIVER_CLASS); + database = new DefaultJdbcDatabase( + DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + Db2Source.DRIVER_CLASS, + String.format(DatabaseDriver.DB2.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("db").asText()) + ) + ); final String createSchemaQuery = String.format("CREATE SCHEMA %s", SCHEMA_NAME); final String createTableQuery1 = String @@ -160,10 +165,10 @@ protected void tearDown(final TestDestinationEnv testEnv) { @Test public void testCheckPrivilegesForUserWithLessPerm() throws Exception { createUser(LESS_PERMITTED_USER); - JsonNode config = getConfig(LESS_PERMITTED_USER, PASSWORD); + final JsonNode config = getConfig(LESS_PERMITTED_USER, PASSWORD); final List actualNamesWithPermission = getActualNamesWithPermission(config); - List expected = List.of(STREAM_NAME3, STREAM_NAME1); + final List expected = List.of(STREAM_NAME3, STREAM_NAME1); assertEquals(expected.size(), actualNamesWithPermission.size()); assertEquals(expected, actualNamesWithPermission); } @@ -172,21 +177,21 @@ public void testCheckPrivilegesForUserWithLessPerm() throws 
Exception { public void testCheckPrivilegesForUserWithoutPerm() throws Exception { createUser(USER_WITH_OUT_PERMISSIONS); - JsonNode config = getConfig(USER_WITH_OUT_PERMISSIONS, PASSWORD); + final JsonNode config = getConfig(USER_WITH_OUT_PERMISSIONS, PASSWORD); final List actualNamesWithPermission = getActualNamesWithPermission(config); - List expected = Collections.emptyList(); + final List expected = Collections.emptyList(); assertEquals(0, actualNamesWithPermission.size()); assertEquals(expected, actualNamesWithPermission); } - private void createUser(String lessPermittedUser) throws IOException, InterruptedException { - String encryptedPassword = db.execInContainer("openssl", "passwd", PASSWORD).getStdout().replaceAll("\n", ""); + private void createUser(final String lessPermittedUser) throws IOException, InterruptedException { + final String encryptedPassword = db.execInContainer("openssl", "passwd", PASSWORD).getStdout().replaceAll("\n", ""); db.execInContainer("useradd", lessPermittedUser, "-p", encryptedPassword); } - private List getActualNamesWithPermission(JsonNode config) throws Exception { - AirbyteCatalog airbyteCatalog = new Db2Source().discover(config); + private List getActualNamesWithPermission(final JsonNode config) throws Exception { + final AirbyteCatalog airbyteCatalog = new Db2Source().discover(config); return airbyteCatalog .getStreams() .stream() diff --git a/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceCertificateAcceptanceTest.java b/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceCertificateAcceptanceTest.java index f35bbb52e2095..0f11b1fa1fc93 100644 --- a/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceCertificateAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceCertificateAcceptanceTest.java @@ -9,7 +9,8 @@ import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.source.db2.Db2Source; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; @@ -119,11 +120,14 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc config.get("db").asText()) + ":sslConnection=true;sslTrustStoreLocation=" + KEY_STORE_FILE_PATH + ";sslTrustStorePassword=" + TEST_KEY_STORE_PASS + ";"; - database = Databases.createJdbcDatabase( - config.get("username").asText(), - config.get("password").asText(), - jdbcUrl, - Db2Source.DRIVER_CLASS); + database = new DefaultJdbcDatabase( + DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + Db2Source.DRIVER_CLASS, + jdbcUrl + ) + ); final String createSchemaQuery = String.format("CREATE SCHEMA %s", SCHEMA_NAME); final String createTableQuery1 = String diff --git a/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceDatatypeTest.java 
b/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceDatatypeTest.java index 0efdb43f756a2..32f019085017c 100644 --- a/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceDatatypeTest.java @@ -8,12 +8,16 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.source.db2.Db2Source; import io.airbyte.integrations.standardtest.source.AbstractSourceDatabaseTypeTest; import io.airbyte.integrations.standardtest.source.TestDataHolder; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; import io.airbyte.protocol.models.JsonSchemaType; +import javax.sql.DataSource; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.testcontainers.containers.Db2Container; @@ -56,15 +60,15 @@ protected Database setupDatabase() throws Exception { .build())) .build()); - final Database database = Databases.createDatabase( + final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:db2://%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - config.get("db").asText()), Db2Source.DRIVER_CLASS, - SQLDialect.DEFAULT); + String.format(DatabaseDriver.DB2.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("db").asText()), SQLDialect.DEFAULT); + final Database database = new Database(dslContext); database.query(ctx -> ctx.fetch("CREATE SCHEMA TEST")); diff --git a/airbyte-integrations/connectors/source-jdbc/build.gradle b/airbyte-integrations/connectors/source-jdbc/build.gradle index 44813fddb4dec..b6aa9b2290c0d 100644 --- a/airbyte-integrations/connectors/source-jdbc/build.gradle +++ b/airbyte-integrations/connectors/source-jdbc/build.gradle @@ -27,11 +27,11 @@ dependencies { testImplementation project(':airbyte-test-utils') - testImplementation "org.postgresql:postgresql:42.2.18" - testImplementation "org.testcontainers:postgresql:1.15.3" + testImplementation libs.postgresql + testImplementation libs.testcontainers.postgresql integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') - integrationTestJavaImplementation "org.testcontainers:postgresql:1.15.3" + integrationTestJavaImplementation libs.testcontainers.postgresql testFixturesImplementation "org.hamcrest:hamcrest-all:1.3" testFixturesImplementation project(':airbyte-protocol:models') diff --git a/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java b/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java index 0f2aeac953145..a3f8381b7ce74 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java +++ b/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java @@ -25,11 +25,12 @@ import 
io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; -import io.airbyte.db.Databases; import io.airbyte.db.JdbcCompatibleSourceOperations; import io.airbyte.db.SqlDatabase; +import io.airbyte.db.factory.DataSourceFactory; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; +import io.airbyte.db.jdbc.StreamingJdbcDatabase; import io.airbyte.db.jdbc.streaming.JdbcStreamingQueryConfig; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.source.jdbc.dto.JdbcPrivilegeDto; @@ -289,15 +290,17 @@ public AutoCloseableIterator queryTableIncremental(final JdbcDatabase @Override public JdbcDatabase createDatabase(final JsonNode config) throws SQLException { final JsonNode jdbcConfig = toDatabaseConfig(config); - - final JdbcDatabase database = Databases.createStreamingJdbcDatabase( - jdbcConfig.has("username") ? jdbcConfig.get("username").asText() : null, - jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null, - jdbcConfig.get("jdbc_url").asText(), - driverClass, - streamingQueryConfigProvider, - JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties", getJdbcParameterDelimiter()), - sourceOperations); + final JdbcDatabase database = new StreamingJdbcDatabase( + DataSourceFactory.create( + jdbcConfig.has("username") ? jdbcConfig.get("username").asText() : null, + jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null, + driverClass, + jdbcConfig.get("jdbc_url").asText(), + JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties", getJdbcParameterDelimiter()) + ), + sourceOperations, + streamingQueryConfigProvider + ); quoteString = (quoteString == null ? database.getMetaData().getIdentifierQuoteString() : quoteString); diff --git a/airbyte-integrations/connectors/source-jdbc/src/test-integration/java/io/airbyte/integrations/source/jdbc/JdbcSourceSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-jdbc/src/test-integration/java/io/airbyte/integrations/source/jdbc/JdbcSourceSourceAcceptanceTest.java index 1541ae6ce9539..30e63c3797d16 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/test-integration/java/io/airbyte/integrations/source/jdbc/JdbcSourceSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-jdbc/src/test-integration/java/io/airbyte/integrations/source/jdbc/JdbcSourceSourceAcceptanceTest.java @@ -9,7 +9,8 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; import io.airbyte.protocol.models.CatalogHelpers; @@ -19,6 +20,7 @@ import io.airbyte.protocol.models.JsonSchemaType; import java.sql.SQLException; import java.util.HashMap; +import org.jooq.SQLDialect; import org.testcontainers.containers.PostgreSQLContainer; /** @@ -48,10 +50,15 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws SQL container.getDatabaseName())) .build()); - database = Databases.createPostgresDatabase( - config.get("username").asText(), - config.get("password").asText(), - config.get("jdbc_url").asText()); + database = new Database( + DSLContextFactory.create( + config.get("username").asText(), + config.get("password").asText(), 
+ DatabaseDriver.POSTGRESQL.getDriverClassName(), + config.get("jdbc_url").asText(), + SQLDialect.POSTGRES + ) + ); database.query(ctx -> { ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); diff --git a/airbyte-integrations/connectors/source-jdbc/src/test/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-jdbc/src/test/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSourceAcceptanceTest.java index d81b0a3d466e8..1622cc4e79c82 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/test/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-jdbc/src/test/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSourceAcceptanceTest.java @@ -9,6 +9,7 @@ import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.base.IntegrationRunner; @@ -90,7 +91,7 @@ private static class PostgresTestSource extends AbstractJdbcSource imp private static final Logger LOGGER = LoggerFactory.getLogger(PostgresTestSource.class); - static final String DRIVER_CLASS = "org.postgresql.Driver"; + static final String DRIVER_CLASS = DatabaseDriver.POSTGRESQL.getDriverClassName(); public PostgresTestSource() { super(DRIVER_CLASS, AdaptiveStreamingQueryConfig::new, JdbcUtils.getDefaultSourceOperations()); @@ -100,9 +101,9 @@ public PostgresTestSource() { public JsonNode toDatabaseConfig(final JsonNode config) { final ImmutableMap.Builder configBuilder = ImmutableMap.builder() .put("username", config.get("username").asText()) - .put("jdbc_url", String.format("jdbc:postgresql://%s:%s/%s", + .put("jdbc_url", String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), + config.get("port").asInt(), config.get("database").asText())); if (config.has("password")) { diff --git a/airbyte-integrations/connectors/source-jdbc/src/test/java/io/airbyte/integrations/source/jdbc/DefaultJdbcStressTest.java b/airbyte-integrations/connectors/source-jdbc/src/test/java/io/airbyte/integrations/source/jdbc/DefaultJdbcStressTest.java index 06ca30f2b3ba5..02cde9a867b0f 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/test/java/io/airbyte/integrations/source/jdbc/DefaultJdbcStressTest.java +++ b/airbyte-integrations/connectors/source-jdbc/src/test/java/io/airbyte/integrations/source/jdbc/DefaultJdbcStressTest.java @@ -9,6 +9,7 @@ import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.base.IntegrationRunner; @@ -95,7 +96,7 @@ private static class PostgresTestSource extends AbstractJdbcSource imp private static final Logger LOGGER = LoggerFactory.getLogger(PostgresTestSource.class); - static final String DRIVER_CLASS = "org.postgresql.Driver"; + static final String DRIVER_CLASS = DatabaseDriver.POSTGRESQL.getDriverClassName(); public PostgresTestSource() { super(DRIVER_CLASS, AdaptiveStreamingQueryConfig::new, JdbcUtils.getDefaultSourceOperations()); @@ -105,9 +106,9 @@ public PostgresTestSource() { public JsonNode toDatabaseConfig(final JsonNode 
config) { final ImmutableMap.Builder configBuilder = ImmutableMap.builder() .put("username", config.get("username").asText()) - .put("jdbc_url", String.format("jdbc:postgresql://%s:%s/%s", + .put("jdbc_url", String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), + config.get("port").asInt(), config.get("database").asText())); if (config.has("password")) { diff --git a/airbyte-integrations/connectors/source-jdbc/src/test/java/io/airbyte/integrations/source/jdbc/JdbcSourceStressTest.java b/airbyte-integrations/connectors/source-jdbc/src/test/java/io/airbyte/integrations/source/jdbc/JdbcSourceStressTest.java index 192fdb38a1e4c..7468a1d554480 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/test/java/io/airbyte/integrations/source/jdbc/JdbcSourceStressTest.java +++ b/airbyte-integrations/connectors/source-jdbc/src/test/java/io/airbyte/integrations/source/jdbc/JdbcSourceStressTest.java @@ -9,6 +9,7 @@ import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.base.IntegrationRunner; @@ -93,7 +94,7 @@ private static class PostgresTestSource extends AbstractJdbcSource imp private static final Logger LOGGER = LoggerFactory.getLogger(PostgresTestSource.class); - static final String DRIVER_CLASS = "org.postgresql.Driver"; + static final String DRIVER_CLASS = DatabaseDriver.POSTGRESQL.getDriverClassName(); public PostgresTestSource() { super(DRIVER_CLASS, AdaptiveStreamingQueryConfig::new, JdbcUtils.getDefaultSourceOperations()); @@ -103,9 +104,9 @@ public PostgresTestSource() { public JsonNode toDatabaseConfig(final JsonNode config) { final ImmutableMap.Builder configBuilder = ImmutableMap.builder() .put("username", config.get("username").asText()) - .put("jdbc_url", String.format("jdbc:postgresql://%s:%s/%s", + .put("jdbc_url", String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), + config.get("port").asInt(), config.get("database").asText())); if (config.has("password")) { diff --git a/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java index ed66b4f428d18..467a26a43fd16 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java @@ -22,10 +22,11 @@ import io.airbyte.commons.resources.MoreResources; import io.airbyte.commons.string.Strings; import io.airbyte.commons.util.MoreIterators; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcSourceOperations; import io.airbyte.db.jdbc.JdbcUtils; +import io.airbyte.db.jdbc.StreamingJdbcDatabase; import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; @@ -202,14 +203,17 @@ public void setup() throws Exception { 
streamName = TABLE_NAME; - database = Databases.createStreamingJdbcDatabase( - jdbcConfig.get("username").asText(), - jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null, - jdbcConfig.get("jdbc_url").asText(), - getDriverClass(), - AdaptiveStreamingQueryConfig::new, - JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties", getJdbcParameterDelimiter()), - JdbcUtils.getDefaultSourceOperations()); + database = new StreamingJdbcDatabase( + DataSourceFactory.create( + jdbcConfig.get("username").asText(), + jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null, + getDriverClass(), + jdbcConfig.get("jdbc_url").asText(), + JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties", getJdbcParameterDelimiter()) + ), + JdbcUtils.getDefaultSourceOperations(), + AdaptiveStreamingQueryConfig::new + ); if (supportsSchemas()) { createSchemas(); diff --git a/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcStressTest.java b/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcStressTest.java index 1870b64c87d0b..6ca768e5b1aff 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcStressTest.java +++ b/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcStressTest.java @@ -13,7 +13,8 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.stream.MoreStreams; import io.airbyte.commons.string.Strings; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; import io.airbyte.protocol.models.AirbyteCatalog; @@ -114,11 +115,14 @@ public void setup() throws Exception { config = getConfig(); final JsonNode jdbcConfig = source.toDatabaseConfig(config); - final JdbcDatabase database = Databases.createJdbcDatabase( - jdbcConfig.get("username").asText(), - jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null, - jdbcConfig.get("jdbc_url").asText(), - getDriverClass()); + final JdbcDatabase database = new DefaultJdbcDatabase( + DataSourceFactory.create( + jdbcConfig.get("username").asText(), + jdbcConfig.has("password") ? 
jdbcConfig.get("password").asText() : null, + getDriverClass(), + jdbcConfig.get("jdbc_url").asText() + ) + ); database.execute(connection -> connection.createStatement().execute( createTableQuery("id_and_name", String.format("id %s, name VARCHAR(200)", COL_ID_TYPE)))); diff --git a/airbyte-integrations/connectors/source-kafka/build.gradle b/airbyte-integrations/connectors/source-kafka/build.gradle index e42edca972377..78ef8ad884c01 100644 --- a/airbyte-integrations/connectors/source-kafka/build.gradle +++ b/airbyte-integrations/connectors/source-kafka/build.gradle @@ -19,7 +19,7 @@ dependencies { integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-kafka') - integrationTestJavaImplementation "org.testcontainers:kafka:1.15.3" + integrationTestJavaImplementation libs.testcontainers.kafka implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/build.gradle b/airbyte-integrations/connectors/source-mongodb-v2/build.gradle index 3f1e1c7b8a724..d503ce48b38af 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/build.gradle +++ b/airbyte-integrations/connectors/source-mongodb-v2/build.gradle @@ -18,7 +18,7 @@ dependencies { implementation 'org.mongodb:mongodb-driver-sync:4.4.0' - testImplementation 'org.testcontainers:mongodb:1.15.3' + testImplementation libs.testcontainers.mongodb integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-mongodb-v2') diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io.airbyte.integrations.source.mongodb/MongoDbSource.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io.airbyte.integrations.source.mongodb/MongoDbSource.java index e4f4c73235ddc..73f34ce8485af 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io.airbyte.integrations.source.mongodb/MongoDbSource.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io.airbyte.integrations.source.mongodb/MongoDbSource.java @@ -13,7 +13,6 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; -import io.airbyte.db.Databases; import io.airbyte.db.mongodb.MongoDatabase; import io.airbyte.db.mongodb.MongoUtils; import io.airbyte.db.mongodb.MongoUtils.MongoInstanceType; @@ -81,7 +80,7 @@ public JsonNode toDatabaseConfig(final JsonNode config) { @Override protected MongoDatabase createDatabase(final JsonNode config) throws Exception { final var dbConfig = toDatabaseConfig(config); - return Databases.createMongoDatabase(dbConfig.get("connectionString").asText(), + return new MongoDatabase(dbConfig.get("connectionString").asText(), dbConfig.get("database").asText()); } @@ -131,7 +130,7 @@ protected List>> discoverInternal(final MongoDat return tableInfos; } - private Set getAuthorizedCollections(MongoDatabase database) { + private Set getAuthorizedCollections(final MongoDatabase database) { /* * db.runCommand ({listCollections: 1.0, authorizedCollections: true, nameOnly: true }) the command * returns only those collections for which the user has privileges. 
For example, if a user has find @@ -139,7 +138,7 @@ private Set getAuthorizedCollections(MongoDatabase database) { * find or any other action, on the database resource, the command lists all collections in the * database. */ - Document document = database.getDatabase().runCommand(new Document("listCollections", 1) + final Document document = database.getDatabase().runCommand(new Document("listCollections", 1) .append("authorizedCollections", true) .append("nameOnly", true)) .append("filter", "{ 'type': 'collection' }"); diff --git a/airbyte-integrations/connectors/source-mssql-strict-encrypt/build.gradle b/airbyte-integrations/connectors/source-mssql-strict-encrypt/build.gradle index 417845bb2efeb..0a330893fe80a 100644 --- a/airbyte-integrations/connectors/source-mssql-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/source-mssql-strict-encrypt/build.gradle @@ -21,7 +21,7 @@ dependencies { testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation "org.testcontainers:mssqlserver:1.15.3" + testImplementation libs.testcontainers.mssqlserver integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-mssql-strict-encrypt') diff --git a/airbyte-integrations/connectors/source-mssql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlStrictEncryptSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlStrictEncryptSourceAcceptanceTest.java index d853c61656599..b344a5d245516 100644 --- a/airbyte-integrations/connectors/source-mssql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlStrictEncryptSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlStrictEncryptSourceAcceptanceTest.java @@ -10,7 +10,9 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.ssh.SshHelpers; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; @@ -21,7 +23,9 @@ import io.airbyte.protocol.models.JsonSchemaType; import java.sql.SQLException; import java.util.HashMap; +import javax.sql.DataSource; import org.apache.commons.lang3.RandomStringUtils; +import org.jooq.DSLContext; import org.testcontainers.containers.MSSQLServerContainer; import org.testcontainers.utility.DockerImageName; @@ -66,14 +70,14 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws SQL } private static Database getDatabase(final JsonNode baseConfig) { - return Databases.createDatabase( + final DSLContext dslContext = DSLContextFactory.create( baseConfig.get("username").asText(), baseConfig.get("password").asText(), + DatabaseDriver.MSSQLSERVER.getDriverClassName(), String.format("jdbc:sqlserver://%s:%s;encrypt=true;trustServerCertificate=true;", baseConfig.get("host").asText(), - baseConfig.get("port").asInt()), - 
"com.microsoft.sqlserver.jdbc.SQLServerDriver", - null); + baseConfig.get("port").asInt()), null); + return new Database(dslContext); } @Override diff --git a/airbyte-integrations/connectors/source-mssql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mssql/MssqlStrictEncryptJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mssql/MssqlStrictEncryptJdbcSourceAcceptanceTest.java index 5813742a20746..02dcd38199775 100644 --- a/airbyte-integrations/connectors/source-mssql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mssql/MssqlStrictEncryptJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mssql/MssqlStrictEncryptJdbcSourceAcceptanceTest.java @@ -12,7 +12,9 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; import io.airbyte.commons.string.Strings; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.base.ssh.SshHelpers; @@ -48,13 +50,16 @@ public void setup() throws Exception { .put("password", dbContainer.getPassword()) .build()); - database = Databases.createJdbcDatabase( - configWithoutDbName.get("username").asText(), - configWithoutDbName.get("password").asText(), - String.format("jdbc:sqlserver://%s:%s", - configWithoutDbName.get("host").asText(), - configWithoutDbName.get("port").asInt()), - "com.microsoft.sqlserver.jdbc.SQLServerDriver"); + database = new DefaultJdbcDatabase( + DataSourceFactory.create( + configWithoutDbName.get("username").asText(), + configWithoutDbName.get("password").asText(), + DatabaseDriver.MSSQLSERVER.getDriverClassName(), + String.format("jdbc:sqlserver://%s:%d", + configWithoutDbName.get("host").asText(), + configWithoutDbName.get("port").asInt()) + ) + ); final String dbName = Strings.addRandomSuffix("db", "_", 10).toLowerCase(); diff --git a/airbyte-integrations/connectors/source-mssql/build.gradle b/airbyte-integrations/connectors/source-mssql/build.gradle index 11bcc51355591..5c6f970c0ea6e 100644 --- a/airbyte-integrations/connectors/source-mssql/build.gradle +++ b/airbyte-integrations/connectors/source-mssql/build.gradle @@ -11,7 +11,7 @@ application { } dependencies { - implementation "org.postgresql:postgresql:42.2.18" + implementation libs.postgresql implementation project(':airbyte-db:lib') implementation project(':airbyte-integrations:bases:base-java') @@ -27,7 +27,7 @@ dependencies { testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation "org.testcontainers:mssqlserver:1.15.3" + testImplementation libs.testcontainers.mssqlserver integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') performanceTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java index d8b97821ea4e9..5c1667c705ace 100644 --- 
a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java @@ -16,6 +16,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.base.IntegrationRunner; @@ -50,7 +51,7 @@ public class MssqlSource extends AbstractJdbcSource implements Source private static final Logger LOGGER = LoggerFactory.getLogger(MssqlSource.class); - static final String DRIVER_CLASS = "com.microsoft.sqlserver.jdbc.SQLServerDriver"; + static final String DRIVER_CLASS = DatabaseDriver.MSSQLSERVER.getDriverClassName(); public static final String MSSQL_CDC_OFFSET = "mssql_cdc_offset"; public static final String MSSQL_DB_HISTORY = "mssql_db_history"; public static final String CDC_LSN = "_ab_cdc_lsn"; diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java index 1e869ebb77723..b827e1e1f48fd 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java @@ -9,7 +9,9 @@ import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.ssh.SshBastionContainer; import io.airbyte.integrations.base.ssh.SshHelpers; import io.airbyte.integrations.base.ssh.SshTunnel; @@ -26,7 +28,9 @@ import java.util.HashMap; import java.util.List; import java.util.Objects; +import javax.sql.DataSource; import org.apache.commons.lang3.RandomStringUtils; +import org.jooq.DSLContext; import org.testcontainers.containers.JdbcDatabaseContainer; import org.testcontainers.containers.MSSQLServerContainer; import org.testcontainers.containers.Network; @@ -63,14 +67,14 @@ public ImmutableMap.Builder getMSSQLDbConfigBuilder(final JdbcDa } private static Database getDatabaseFromConfig(final JsonNode config) { - return Databases.createDatabase( + final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:sqlserver://%s:%s;", + DatabaseDriver.MSSQLSERVER.getDriverClassName(), + String.format("jdbc:sqlserver://%s:%d;", config.get("host").asText(), - config.get("port").asInt()), - "com.microsoft.sqlserver.jdbc.SQLServerDriver", - null); + config.get("port").asInt()), null); + return new Database(dslContext); } private void startTestContainers() { diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceAcceptanceTest.java 
b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceAcceptanceTest.java
index 5b82dd8185a78..c177f4db63500 100644
--- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceAcceptanceTest.java
+++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceAcceptanceTest.java
@@ -9,7 +9,7 @@ import com.google.common.collect.Lists;
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.db.Database;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DSLContextFactory;
 import io.airbyte.integrations.base.ssh.SshHelpers;
 import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest;
 import io.airbyte.integrations.standardtest.source.TestDestinationEnv;
@@ -22,6 +22,7 @@ import io.airbyte.protocol.models.JsonSchemaType;
 import io.airbyte.protocol.models.SyncMode;
 import java.util.List;
+import org.jooq.DSLContext;
 import org.testcontainers.containers.MSSQLServerContainer;
 
 public class CdcMssqlSourceAcceptanceTest extends SourceAcceptanceTest {
@@ -91,14 +92,15 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws InterruptedException {
     container = new MSSQLServerContainer<>("mcr.microsoft.com/mssql/server:2019-latest").acceptLicense();
     container.addEnv("MSSQL_AGENT_ENABLED", "True"); // need this running for cdc to work
     container.start();
-    database = Databases.createDatabase(
+
+    // config is only built below, so the JDBC URL must come from the container here.
+    final DSLContext dslContext = DSLContextFactory.create(
         container.getUsername(),
         container.getPassword(),
-        String.format("jdbc:sqlserver://%s:%s",
-            container.getHost(),
-            container.getFirstMappedPort()),
-        "com.microsoft.sqlserver.jdbc.SQLServerDriver",
-        null);
+        container.getDriverClassName(),
+        String.format("jdbc:sqlserver://%s:%d;",
+            container.getHost(),
+            container.getFirstMappedPort()), null);
+    database = new Database(dslContext);
 
     config = Jsons.jsonNode(ImmutableMap.builder()
         .put("host", container.getHost())
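
The hunk above also corrects an ordering hazard that is easy to introduce during this migration: the DSLContext is created before the config field is assigned, so the JDBC URL has to be built from the running Testcontainers instance, not from config. A sketch of the safe ordering under that assumption (the class name and try/finally framing are illustrative; the jOOQ and Testcontainers calls mirror the ones in this diff):

    // Sketch only: container first, then the DSLContext, then any config object.
    import io.airbyte.db.Database;
    import io.airbyte.db.factory.DSLContextFactory;
    import org.jooq.DSLContext;
    import org.testcontainers.containers.MSSQLServerContainer;

    public class DslContextOrderingSketch {

      public static void main(final String[] args) throws Exception {
        final MSSQLServerContainer<?> container =
            new MSSQLServerContainer<>("mcr.microsoft.com/mssql/server:2019-latest").acceptLicense();
        container.start();
        try {
          // Host and mapped port exist only after start(); read them from the
          // container, never from a config that has not been built yet.
          final DSLContext dslContext = DSLContextFactory.create(
              container.getUsername(),
              container.getPassword(),
              container.getDriverClassName(),
              String.format("jdbc:sqlserver://%s:%d;", container.getHost(), container.getFirstMappedPort()),
              null); // the MSSQL diffs in this series pass a null SQLDialect
          final Database database = new Database(dslContext);
          database.query(ctx -> ctx.fetch("SELECT 1;"));
        } finally {
          container.stop();
        }
      }

    }
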
work container.start(); - final Database database = Databases.createDatabase( + final DSLContext dslContext = DSLContextFactory.create( container.getUsername(), container.getPassword(), - String.format("jdbc:sqlserver://%s:%s", - container.getHost(), - container.getFirstMappedPort()), - "com.microsoft.sqlserver.jdbc.SQLServerDriver", - null); + container.getDriverClassName(), + String.format("jdbc:sqlserver://%s:%s;", + config.get("host").asText(), + config.get("port").asInt()), null); + final Database database = new Database(dslContext); config = Jsons.jsonNode(ImmutableMap.builder() .put("host", container.getHost()) @@ -78,14 +80,16 @@ protected String getNameSpace() { } private void executeQuery(final String query) { - try (final Database database = Databases.createDatabase( + final DSLContext dslContext = DSLContextFactory.create( + DataSourceFactory.create( container.getUsername(), container.getPassword(), - String.format("jdbc:sqlserver://%s:%s", - container.getHost(), - container.getFirstMappedPort()), - "com.microsoft.sqlserver.jdbc.SQLServerDriver", - null)) { + container.getDriverClassName(), + String.format("jdbc:sqlserver://%s:%d;", + config.get("host").asText(), + config.get("port").asInt())), null); + + try (final Database database = new Database(dslContext)) { database.query( ctx -> ctx .execute(query)); diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlRdsSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlRdsSourceAcceptanceTest.java index c70ab223f9033..001440f7392b4 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlRdsSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlRdsSourceAcceptanceTest.java @@ -9,11 +9,15 @@ import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; import java.nio.file.Path; import java.sql.SQLException; +import javax.sql.DataSource; import org.apache.commons.lang3.RandomStringUtils; +import org.jooq.DSLContext; public class MssqlRdsSourceAcceptanceTest extends MssqlSourceAcceptanceTest { @@ -50,15 +54,15 @@ private Database getDatabase() { case "unencrypted" -> additionalParameter = "encrypt=false;"; case "encrypted_trust_server_certificate" -> additionalParameter = "encrypt=true;trustServerCertificate=true;"; } - return Databases.createDatabase( + final DSLContext dslContext = DSLContextFactory.create( baseConfig.get("username").asText(), baseConfig.get("password").asText(), - String.format("jdbc:sqlserver://%s:%s;%s", + DatabaseDriver.MSSQLSERVER.getDriverClassName(), + String.format("jdbc:sqlserver://%s:%d;%s", baseConfig.get("host").asText(), baseConfig.get("port").asInt(), - additionalParameter), - "com.microsoft.sqlserver.jdbc.SQLServerDriver", - null); + additionalParameter), null); + return new Database(dslContext); } @Override diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceAcceptanceTest.java 
b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceAcceptanceTest.java index fdcaaf5215c34..408581234b7a1 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceAcceptanceTest.java @@ -10,7 +10,9 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.ssh.SshHelpers; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; @@ -21,6 +23,8 @@ import io.airbyte.protocol.models.JsonSchemaType; import java.sql.SQLException; import java.util.HashMap; +import javax.sql.DataSource; +import org.jooq.DSLContext; import org.testcontainers.containers.MSSQLServerContainer; public class MssqlSourceAcceptanceTest extends SourceAcceptanceTest { @@ -96,14 +100,14 @@ protected JsonNode getState() { } private static Database getDatabase(final JsonNode config) { - return Databases.createDatabase( + final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:sqlserver://%s:%s", + DatabaseDriver.MSSQLSERVER.getDriverClassName(), + String.format("jdbc:sqlserver://%s:%d;", config.get("host").asText(), - config.get("port").asInt()), - "com.microsoft.sqlserver.jdbc.SQLServerDriver", - null); + config.get("port").asInt()), null); + return new Database(dslContext); } } diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceDatatypeTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceDatatypeTest.java index a55bba73c8bbb..4508c4a36451e 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceDatatypeTest.java @@ -10,11 +10,15 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.standardtest.source.AbstractSourceDatabaseTypeTest; import io.airbyte.integrations.standardtest.source.TestDataHolder; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; import io.airbyte.protocol.models.JsonSchemaType; +import javax.sql.DataSource; +import org.jooq.DSLContext; import org.testcontainers.containers.MSSQLServerContainer; public class MssqlSourceDatatypeTest extends AbstractSourceDatabaseTypeTest { @@ -50,14 +54,14 @@ protected Database setupDatabase() throws Exception { } private static Database getDatabase(final JsonNode config) { - return Databases.createDatabase( + final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), 
config.get("password").asText(), - String.format("jdbc:sqlserver://%s:%s", + DatabaseDriver.MSSQLSERVER.getDriverClassName(), + String.format("jdbc:sqlserver://%s:%d;", config.get("host").asText(), - config.get("port").asInt()), - "com.microsoft.sqlserver.jdbc.SQLServerDriver", - null); + config.get("port").asInt()), null); + return new Database(dslContext); } @Override diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/SslEnabledMssqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/SslEnabledMssqlSourceAcceptanceTest.java index b1c8581e4e38b..a2cb9a2bc11f3 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/SslEnabledMssqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/SslEnabledMssqlSourceAcceptanceTest.java @@ -9,10 +9,12 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; import java.sql.SQLException; import org.apache.commons.lang3.RandomStringUtils; +import org.jooq.DSLContext; import org.testcontainers.containers.MSSQLServerContainer; import org.testcontainers.utility.DockerImageName; @@ -52,14 +54,14 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws SQL } private static Database getDatabase(final JsonNode baseConfig) { - return Databases.createDatabase( + final DSLContext dslContext = DSLContextFactory.create( baseConfig.get("username").asText(), baseConfig.get("password").asText(), + DatabaseDriver.MSSQLSERVER.getDriverClassName(), String.format("jdbc:sqlserver://%s:%s;encrypt=true;trustServerCertificate=true;", baseConfig.get("host").asText(), - baseConfig.get("port").asInt()), - "com.microsoft.sqlserver.jdbc.SQLServerDriver", - null); + baseConfig.get("port").asInt()), null); + return new Database(dslContext); } } diff --git a/airbyte-integrations/connectors/source-mssql/src/test-performance/java/io/airbyte/integrations/source/mssql/FillMsSqlTestDbScriptTest.java b/airbyte-integrations/connectors/source-mssql/src/test-performance/java/io/airbyte/integrations/source/mssql/FillMsSqlTestDbScriptTest.java index ccc2996916e35..0a7c760c5ce8c 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-performance/java/io/airbyte/integrations/source/mssql/FillMsSqlTestDbScriptTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-performance/java/io/airbyte/integrations/source/mssql/FillMsSqlTestDbScriptTest.java @@ -8,7 +8,8 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; import io.airbyte.integrations.standardtest.source.performancetest.AbstractSourceFillDbWithTestData; import java.util.stream.Stream; @@ -46,17 +47,15 @@ protected Database setupDatabase(final String dbName) { .put("replication_method", replicationMethod) .build()); - final Database database = Databases.createDatabase( + return 
new Database(DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), + DatabaseDriver.MSSQLSERVER.getDriverClassName(), String.format("jdbc:sqlserver://%s:%s;databaseName=%s;", config.get("host").asText(), config.get("port").asInt(), dbName), - "com.microsoft.sqlserver.jdbc.SQLServerDriver", - null); - - return database; + null)); } /** diff --git a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java index b2b063aa69f17..c526bfccdebae 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java @@ -21,12 +21,14 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.db.jdbc.StreamingJdbcDatabase; import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.debezium.CdcSourceTest; @@ -39,6 +41,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import org.jooq.DSLContext; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -57,6 +60,7 @@ public class CdcMssqlSourceTest extends CdcSourceTest { private JdbcDatabase testJdbcDatabase; private MssqlSource source; private JsonNode config; + private DSLContext dslContext; @BeforeEach public void setup() throws SQLException { @@ -85,22 +89,24 @@ private void init() { .put("replication_method", "CDC") .build()); - database = Databases.createDatabase( + dslContext = DSLContextFactory.create( container.getUsername(), container.getPassword(), + DRIVER_CLASS, String.format("jdbc:sqlserver://%s:%s", container.getHost(), container.getFirstMappedPort()), - DRIVER_CLASS, null); - testJdbcDatabase = Databases.createJdbcDatabase( + database = new Database(dslContext); + + testJdbcDatabase = new DefaultJdbcDatabase(DataSourceFactory.create( TEST_USER_NAME, TEST_USER_PASSWORD, + DRIVER_CLASS, String.format("jdbc:sqlserver://%s:%s", container.getHost(), - container.getFirstMappedPort()), - DRIVER_CLASS); + container.getFirstMappedPort()))); executeQuery("CREATE DATABASE " + dbName + ";"); switchSnapshotIsolation(true, dbName); @@ -203,7 +209,8 @@ public String columnClause(final Map columnsWithDataType, final @AfterEach public void tearDown() { try { - database.close(); + dslContext.close(); + testJdbcDatabase.close(); container.close(); } catch (final Exception e) { throw new RuntimeException(e); @@ -309,17 +316,17 @@ protected CdcTargetPosition cdcLatestTargetPosition() { } catch (final InterruptedException e) { throw new RuntimeException(e); } - final JdbcDatabase jdbcDatabase = Databases.createStreamingJdbcDatabase( - config.get("username").asText(), + final JdbcDatabase jdbcDatabase = new 
StreamingJdbcDatabase( + DataSourceFactory.create(config.get("username").asText(), config.get("password").asText(), + DRIVER_CLASS, String.format("jdbc:sqlserver://%s:%s;databaseName=%s;", config.get("host").asText(), config.get("port").asInt(), - dbName), - DRIVER_CLASS, - AdaptiveStreamingQueryConfig::new, - Maps.newHashMap(), - new MssqlSourceOperations()); + dbName)), + new MssqlSourceOperations(), + AdaptiveStreamingQueryConfig::new + ); return MssqlCdcTargetPosition.getTargetPosition(jdbcDatabase, dbName); } diff --git a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlJdbcSourceAcceptanceTest.java index b90239979322c..72937e76fef87 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlJdbcSourceAcceptanceTest.java @@ -9,7 +9,9 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; import io.airbyte.integrations.source.jdbc.test.JdbcSourceAcceptanceTest; @@ -40,13 +42,16 @@ public void setup() throws Exception { .put("password", dbContainer.getPassword()) .build()); - database = Databases.createJdbcDatabase( - configWithoutDbName.get("username").asText(), - configWithoutDbName.get("password").asText(), - String.format("jdbc:sqlserver://%s:%s", - configWithoutDbName.get("host").asText(), - configWithoutDbName.get("port").asInt()), - "com.microsoft.sqlserver.jdbc.SQLServerDriver"); + database = new DefaultJdbcDatabase( + DataSourceFactory.create( + configWithoutDbName.get("username").asText(), + configWithoutDbName.get("password").asText(), + DatabaseDriver.MSSQLSERVER.getDriverClassName(), + String.format("jdbc:sqlserver://%s:%d", + configWithoutDbName.get("host").asText(), + configWithoutDbName.get("port").asInt()) + ) + ); final String dbName = Strings.addRandomSuffix("db", "_", 10).toLowerCase(); diff --git a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlSourceTest.java b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlSourceTest.java index f94365f266273..0485ca54c7d8c 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlSourceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlSourceTest.java @@ -13,7 +13,8 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.Field; @@ -108,14 +109,16 @@ private JsonNode getConfig(final MSSQLServerContainer db) { public static Database getDatabase(final JsonNode config) { // todo 
(cgardens) - rework this abstraction so that we do not have to pass a null into the // constructor. at least explicitly handle it, even if the impl doesn't change. - return Databases.createDatabase( - config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:sqlserver://%s:%s", - config.get("host").asText(), - config.get("port").asInt()), - "com.microsoft.sqlserver.jdbc.SQLServerDriver", - null); + return new Database( + DSLContextFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.MSSQLSERVER.getDriverClassName(), + String.format("jdbc:sqlserver://%s:%s", + config.get("host").asText(), + config.get("port").asInt()), + null) + ); } } diff --git a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlStressTest.java b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlStressTest.java index f8c1b5d8954af..3f6e0a7b0790c 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlStressTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlStressTest.java @@ -9,7 +9,9 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; import io.airbyte.integrations.source.jdbc.test.JdbcStressTest; @@ -43,13 +45,16 @@ public void setup() throws Exception { .put("password", dbContainer.getPassword()) .build()); - database = Databases.createJdbcDatabase( - configWithoutDbName.get("username").asText(), - configWithoutDbName.get("password").asText(), - String.format("jdbc:sqlserver://%s:%s", - configWithoutDbName.get("host").asText(), - configWithoutDbName.get("port").asInt()), - "com.microsoft.sqlserver.jdbc.SQLServerDriver"); + database = new DefaultJdbcDatabase( + DataSourceFactory.create( + configWithoutDbName.get("username").asText(), + configWithoutDbName.get("password").asText(), + DatabaseDriver.MSSQLSERVER.getDriverClassName(), + String.format("jdbc:sqlserver://%s:%d", + configWithoutDbName.get("host").asText(), + configWithoutDbName.get("port").asInt()) + ) + ); final String dbName = Strings.addRandomSuffix("db", "_", 10).toLowerCase(); diff --git a/airbyte-integrations/connectors/source-mysql-strict-encrypt/build.gradle b/airbyte-integrations/connectors/source-mysql-strict-encrypt/build.gradle index a1590a593fcff..6696af405ae58 100644 --- a/airbyte-integrations/connectors/source-mysql-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/source-mysql-strict-encrypt/build.gradle @@ -21,7 +21,7 @@ dependencies { testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation project(':airbyte-test-utils') - testImplementation 'org.testcontainers:mysql:1.15.3' + testImplementation libs.testcontainers.mysql integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') diff --git a/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptSourceAcceptanceTest.java 
b/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptSourceAcceptanceTest.java index f46ce730bbfb5..80f3e8adec3a2 100644 --- a/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptSourceAcceptanceTest.java @@ -12,7 +12,9 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.ssh.SshHelpers; import io.airbyte.integrations.source.mysql.MySqlSource; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; @@ -26,6 +28,8 @@ import io.airbyte.protocol.models.JsonSchemaType; import io.airbyte.protocol.models.SyncMode; import java.util.HashMap; +import javax.sql.DataSource; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.testcontainers.containers.MySQLContainer; @@ -51,26 +55,25 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc .put("replication_method", MySqlSource.ReplicationMethod.STANDARD) .build()); - final Database database = Databases.createDatabase( + try (final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), - config.get("password").asText(), + "", + DatabaseDriver.MYSQL.getDriverClassName(), String.format("jdbc:mysql://%s:%s/%s?%s", config.get("host").asText(), config.get("port").asText(), config.get("database").asText(), - String.join("&", SSL_PARAMETERS)), - "com.mysql.cj.jdbc.Driver", - SQLDialect.MYSQL); - - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - return null; - }); - - database.close(); + String.join("&", SSL_PARAMETERS)), SQLDialect.MYSQL)) { + final Database database = new Database(dslContext); + + database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + return null; + }); + } } @Override diff --git a/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptJdbcSourceAcceptanceTest.java index d6d38b2419c83..48235e1395e5f 100644 --- a/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptJdbcSourceAcceptanceTest.java +++ 
b/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptJdbcSourceAcceptanceTest.java @@ -13,7 +13,8 @@ import io.airbyte.commons.resources.MoreResources; import io.airbyte.commons.string.Strings; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.base.ssh.SshHelpers; import io.airbyte.integrations.source.jdbc.test.JdbcSourceAcceptanceTest; @@ -22,6 +23,7 @@ import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; @@ -37,6 +39,7 @@ class MySqlStrictEncryptJdbcSourceAcceptanceTest extends JdbcSourceAcceptanceTes protected static MySQLContainer container; protected Database database; + protected DSLContext dslContext; @BeforeAll static void init() throws SQLException { @@ -60,29 +63,27 @@ public void setup() throws Exception { .put("password", TEST_PASSWORD) .build()); - database = Databases.createDatabase( + dslContext = DSLContextFactory.create( config.get("username").asText(), - config.get("password").asText(), + "", + DatabaseDriver.MYSQL.getDriverClassName(), String.format("jdbc:mysql://%s:%s?%s", config.get("host").asText(), config.get("port").asText(), - String.join("&", SSL_PARAMETERS)), - MySqlSource.DRIVER_CLASS, - - SQLDialect.MYSQL); + String.join("&", SSL_PARAMETERS)), SQLDialect.MYSQL); + database = new Database(dslContext); database.query(ctx -> { ctx.fetch("CREATE DATABASE " + config.get("database").asText()); return null; }); - database.close(); super.setup(); } @AfterEach void tearDownMySql() throws Exception { - database.close(); + dslContext.close(); super.tearDown(); } diff --git a/airbyte-integrations/connectors/source-mysql/build.gradle b/airbyte-integrations/connectors/source-mysql/build.gradle index 296f77ab2d283..01bd7d91a5c94 100644 --- a/airbyte-integrations/connectors/source-mysql/build.gradle +++ b/airbyte-integrations/connectors/source-mysql/build.gradle @@ -24,7 +24,7 @@ dependencies { testImplementation testFixtures(project(':airbyte-integrations:bases:debezium')) testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation 'org.testcontainers:mysql:1.15.3' + testImplementation libs.testcontainers.mysql integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-mysql') diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSource.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSource.java index 9afbfc5392694..bc34c4270d1aa 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSource.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSource.java @@ -16,6 +16,7 @@ import io.airbyte.commons.functional.CheckedConsumer; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.AutoCloseableIterator; +import io.airbyte.db.factory.DatabaseDriver; import 
io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.base.IntegrationRunner; @@ -47,7 +48,7 @@ public class MySqlSource extends AbstractJdbcSource implements Source private static final Logger LOGGER = LoggerFactory.getLogger(MySqlSource.class); - public static final String DRIVER_CLASS = "com.mysql.cj.jdbc.Driver"; + public static final String DRIVER_CLASS = DatabaseDriver.MYSQL.getDriverClassName(); public static final String MYSQL_CDC_OFFSET = "mysql_cdc_offset"; public static final String MYSQL_DB_HISTORY = "mysql_db_history"; public static final String CDC_LOG_FILE = "_ab_cdc_log_file"; diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/CdcMySqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/CdcMySqlSourceAcceptanceTest.java index 04084d6a62110..b50b814018722 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/CdcMySqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/CdcMySqlSourceAcceptanceTest.java @@ -13,7 +13,8 @@ import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.ssh.SshHelpers; import io.airbyte.integrations.source.mysql.MySqlSource.ReplicationMethod; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; @@ -31,6 +32,7 @@ import io.airbyte.protocol.models.SyncMode; import java.util.List; import java.util.stream.Collectors; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.junit.jupiter.api.Test; import org.testcontainers.containers.MySQLContainer; @@ -130,15 +132,15 @@ private void grantCorrectPermissions() { } private void executeQuery(final String query) { - try (final Database database = Databases.createDatabase( + final DSLContext dslContext = DSLContextFactory.create( "root", "test", - String.format("jdbc:mysql://%s:%s/%s", + DatabaseDriver.MYSQL.getDriverClassName(), + String.format(DatabaseDriver.MYSQL.getUrlFormatString(), container.getHost(), container.getFirstMappedPort(), - container.getDatabaseName()), - MySqlSource.DRIVER_CLASS, - SQLDialect.MYSQL)) { + container.getDatabaseName()), SQLDialect.MYSQL); + try (final Database database = new Database(dslContext)) { database.query( ctx -> ctx .execute(query)); diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/CdcMySqlSourceDatatypeTest.java b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/CdcMySqlSourceDatatypeTest.java index 40453e2280106..d78864d7aedec 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/CdcMySqlSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/CdcMySqlSourceDatatypeTest.java @@ -8,7 +8,8 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import 
io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.standardtest.source.AbstractSourceDatabaseTypeTest; import io.airbyte.integrations.standardtest.source.TestDataHolder; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; @@ -18,6 +19,7 @@ import org.apache.commons.codec.binary.Base64; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -59,15 +61,15 @@ protected Database setupDatabase() throws Exception { .put("replication_method", MySqlSource.ReplicationMethod.CDC) .build()); - final Database database = Databases.createDatabase( + final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:mysql://%s:%s/%s", + DatabaseDriver.MYSQL.getDriverClassName(), + String.format(DatabaseDriver.MYSQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "com.mysql.cj.jdbc.Driver", - SQLDialect.MYSQL); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.MYSQL); + final Database database = new Database(dslContext); // It disable strict mode in the DB and allows to insert specific values. // For example, it's possible to insert date with zero values "2021-00-00" @@ -95,15 +97,16 @@ private void grantCorrectPermissions() { } private void executeQuery(final String query) { - try (final Database database = Databases.createDatabase( + final DSLContext dslContext = DSLContextFactory.create( "root", "test", - String.format("jdbc:mysql://%s:%s/%s", + DatabaseDriver.MYSQL.getDriverClassName(), + String.format(DatabaseDriver.MYSQL.getUrlFormatString(), container.getHost(), container.getFirstMappedPort(), - container.getDatabaseName()), - MySqlSource.DRIVER_CLASS, - SQLDialect.MYSQL)) { + container.getDatabaseName()), SQLDialect.MYSQL); + + try (final Database database = new Database(dslContext)) { database.query( ctx -> ctx .execute(query)); @@ -409,10 +412,10 @@ private String getLogString(final int length) { } private String getFileDataInBase64() { - File file = new File(getClass().getClassLoader().getResource("test.png").getFile()); + final File file = new File(getClass().getClassLoader().getResource("test.png").getFile()); try { return Base64.encodeBase64String(FileUtils.readFileToByteArray(file)); - } catch (IOException e) { + } catch (final IOException e) { LOGGER.error(String.format("Fail to read the file: %s. 
Error: %s", file.getAbsoluteFile(), e.getMessage())); } return null; diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/MySqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/MySqlSourceAcceptanceTest.java index d3966a335a704..6c6b7708aa795 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/MySqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/MySqlSourceAcceptanceTest.java @@ -9,7 +9,8 @@ import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.ssh.SshHelpers; import io.airbyte.integrations.source.mysql.MySqlSource.ReplicationMethod; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; @@ -23,6 +24,7 @@ import io.airbyte.protocol.models.JsonSchemaType; import io.airbyte.protocol.models.SyncMode; import java.util.HashMap; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.testcontainers.containers.MySQLContainer; @@ -48,25 +50,24 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc .put("replication_method", ReplicationMethod.STANDARD) .build()); - final Database database = Databases.createDatabase( + try (final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:mysql://%s:%s/%s", + DatabaseDriver.MYSQL.getDriverClassName(), + String.format(DatabaseDriver.MYSQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "com.mysql.cj.jdbc.Driver", - SQLDialect.MYSQL); - - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - return null; - }); - - database.close(); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.MYSQL)) { + final Database database = new Database(dslContext); + + database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + return null; + }); + } } @Override diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/MySqlSourceDatatypeTest.java b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/MySqlSourceDatatypeTest.java index dbbbc529b767e..8867e857e83fb 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/MySqlSourceDatatypeTest.java +++ 
b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/MySqlSourceDatatypeTest.java @@ -9,7 +9,8 @@ import com.mysql.cj.MysqlType; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.source.mysql.MySqlSource.ReplicationMethod; import io.airbyte.integrations.standardtest.source.AbstractSourceDatabaseTypeTest; import io.airbyte.integrations.standardtest.source.TestDataHolder; @@ -66,16 +67,18 @@ protected Database setupDatabase() throws Exception { .put("replication_method", ReplicationMethod.STANDARD) .build()); - final Database database = Databases.createDatabase( - config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:mysql://%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "com.mysql.cj.jdbc.Driver", - SQLDialect.MYSQL, - Map.of("zeroDateTimeBehavior", "convertToNull")); + final Database database = new Database( + DSLContextFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.MYSQL.getDriverClassName(), + String.format(DatabaseDriver.MYSQL.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("database").asText()), + SQLDialect.MYSQL, + Map.of("zeroDateTimeBehavior", "convertToNull")) + ); // It disable strict mode in the DB and allows to insert specific values. // For example, it's possible to insert date with zero values "2021-00-00" diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/MySqlSslSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/MySqlSslSourceAcceptanceTest.java index e33e39d269dcf..48e693bda7036 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/MySqlSslSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/source/mysql/MySqlSslSourceAcceptanceTest.java @@ -9,9 +9,11 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.source.mysql.MySqlSource.ReplicationMethod; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.testcontainers.containers.MySQLContainer; @@ -32,28 +34,27 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc .put("replication_method", ReplicationMethod.STANDARD) .build()); - final Database database = Databases.createDatabase( + try (final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), + DatabaseDriver.MYSQL.getDriverClassName(), String.format("jdbc:mysql://%s:%s/%s?%s", config.get("host").asText(), config.get("port").asText(), config.get("database").asText(), - String.join("&", SSL_PARAMETERS)), - "com.mysql.cj.jdbc.Driver", - SQLDialect.MYSQL); - - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - 
ctx.fetch( - "INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); - ctx.fetch( - "INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - return null; - }); - - database.close(); + String.join("&", SSL_PARAMETERS)), SQLDialect.MYSQL)) { + final Database database = new Database(dslContext); + + database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.fetch( + "INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); + ctx.fetch( + "INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + return null; + }); + } } } diff --git a/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/FillMySqlTestDbScriptTest.java b/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/FillMySqlTestDbScriptTest.java index f2a3ca890868a..b7d1d8a94c5e5 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/FillMySqlTestDbScriptTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/FillMySqlTestDbScriptTest.java @@ -8,7 +8,8 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.source.mysql.MySqlSource.ReplicationMethod; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; import io.airbyte.integrations.standardtest.source.performancetest.AbstractSourceFillDbWithTestData; @@ -45,16 +46,18 @@ protected Database setupDatabase(final String dbName) throws Exception { .put("replication_method", ReplicationMethod.STANDARD) .build()); - final Database database = Databases.createDatabase( - config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:mysql://%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - dbName), - "com.mysql.cj.jdbc.Driver", - SQLDialect.MYSQL, - Map.of("zeroDateTimeBehavior", "convertToNull")); + final Database database = new Database( + DSLContextFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.MYSQL.getDriverClassName(), + String.format(DatabaseDriver.MYSQL.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("database").asText()), + SQLDialect.MYSQL, + Map.of("zeroDateTimeBehavior", "convertToNull")) + ); // It disable strict mode in the DB and allows to insert specific values. 
    // For example, it's possible to insert date with zero values "2021-00-00"
diff --git a/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/MySqlRdsSourcePerformanceSecretTest.java b/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/MySqlRdsSourcePerformanceSecretTest.java
index 2d95fe80bd552..163924e758683 100644
--- a/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/MySqlRdsSourcePerformanceSecretTest.java
+++ b/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/MySqlRdsSourcePerformanceSecretTest.java
@@ -9,11 +9,13 @@
 import io.airbyte.commons.io.IOs;
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.db.Database;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DSLContextFactory;
+import io.airbyte.db.factory.DatabaseDriver;
 import io.airbyte.integrations.standardtest.source.performancetest.AbstractSourcePerformanceTest;
 import java.nio.file.Path;
 import java.util.Map;
 import java.util.stream.Stream;
+import org.jooq.DSLContext;
 import org.jooq.SQLDialect;
 import org.junit.jupiter.params.provider.Arguments;

@@ -39,21 +41,23 @@ protected void setupDatabase(final String dbName) throws Exception {
         .put("replication_method", plainConfig.get("replication_method"))
         .build());

-    final Database database = Databases.createDatabase(
+    try (final DSLContext dslContext = DSLContextFactory.create(
         config.get("username").asText(),
         config.get("password").asText(),
-        String.format("jdbc:mysql://%s:%s/%s",
+        DatabaseDriver.MYSQL.getDriverClassName(),
+        String.format(DatabaseDriver.MYSQL.getUrlFormatString(),
            config.get("host").asText(),
-           config.get("port").asText(),
-           dbName),
-        "com.mysql.cj.jdbc.Driver",
+           config.get("port").asInt(),
+           config.get("database").asText()),
         SQLDialect.MYSQL,
-        Map.of("zeroDateTimeBehavior", "convertToNull"));
+        Map.of("zeroDateTimeBehavior", "convertToNull"))) {

-    // It disable strict mode in the DB and allows to insert specific values.
-    // For example, it's possible to insert date with zero values "2021-00-00"
-    database.query(ctx -> ctx.execute("SET @@sql_mode=''"));
-    database.close();
+      final Database database = new Database(dslContext);
+
+      // It disable strict mode in the DB and allows to insert specific values.
+      // For example, it's possible to insert date with zero values "2021-00-00"
+      database.query(ctx -> ctx.execute("SET @@sql_mode=''"));
+    }
   }

   /**
diff --git a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlJdbcSourceAcceptanceTest.java
index 0b4525659f394..7cf4d9b0e323f 100644
--- a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlJdbcSourceAcceptanceTest.java
+++ b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlJdbcSourceAcceptanceTest.java
@@ -13,13 +13,15 @@
 import io.airbyte.commons.resources.MoreResources;
 import io.airbyte.commons.string.Strings;
 import io.airbyte.db.Database;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DSLContextFactory;
+import io.airbyte.db.factory.DatabaseDriver;
 import io.airbyte.integrations.source.jdbc.AbstractJdbcSource;
 import io.airbyte.integrations.source.jdbc.test.JdbcSourceAcceptanceTest;
 import io.airbyte.protocol.models.ConnectorSpecification;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.util.concurrent.Callable;
+import org.jooq.DSLContext;
 import org.jooq.SQLDialect;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.AfterEach;
@@ -35,6 +37,7 @@ class MySqlJdbcSourceAcceptanceTest extends JdbcSourceAcceptanceTest {
   protected static MySQLContainer container;

   protected Database database;
+  protected DSLContext dslContext;

   @BeforeAll
   static void init() throws Exception {
@@ -58,28 +61,26 @@ public void setup() throws Exception {
         .put("password", TEST_PASSWORD.call())
         .build());

-    database = Databases.createDatabase(
+    dslContext = DSLContextFactory.create(
         config.get("username").asText(),
         config.get("password").asText(),
+        DatabaseDriver.MYSQL.getDriverClassName(),
         String.format("jdbc:mysql://%s:%s",
            config.get("host").asText(),
-           config.get("port").asText()),
-        MySqlSource.DRIVER_CLASS,
-
-        SQLDialect.MYSQL);
+           config.get("port").asText()), SQLDialect.MYSQL);
+    database = new Database(dslContext);

     database.query(ctx -> {
       ctx.fetch("CREATE DATABASE " + config.get("database").asText());
       return null;
     });

-    database.close();
     super.setup();
   }

   @AfterEach
   void tearDownMySql() throws Exception {
-    database.close();
+    dslContext.close();
     super.tearDown();
   }

diff --git a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlSslJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlSslJdbcSourceAcceptanceTest.java
index d6780431f480c..c64bdee25abc2 100644
--- a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlSslJdbcSourceAcceptanceTest.java
+++ b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlSslJdbcSourceAcceptanceTest.java
@@ -9,7 +9,10 @@
 import com.google.common.collect.ImmutableMap;
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.commons.string.Strings;
-import io.airbyte.db.Databases;
+import io.airbyte.db.Database;
+import io.airbyte.db.factory.DSLContextFactory;
+import io.airbyte.db.factory.DatabaseDriver;
+import org.jooq.DSLContext;
 import org.jooq.SQLDialect;
 import org.junit.jupiter.api.BeforeEach;

@@ -26,23 +29,23 @@ public void setup() throws Exception {
.put("ssl", true) .build()); - database = Databases.createDatabase( + try (final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:mysql://%s:%s?%s", + DatabaseDriver.MYSQL.getDriverClassName(), + String.format("jdbc:mysql://%s:%s/%s?%s", config.get("host").asText(), config.get("port").asText(), - String.join("&", SSL_PARAMETERS)), - MySqlSource.DRIVER_CLASS, - - SQLDialect.MYSQL); - - database.query(ctx -> { - ctx.fetch("CREATE DATABASE " + config.get("database").asText()); - ctx.fetch("SHOW STATUS LIKE 'Ssl_cipher'"); - return null; - }); - database.close(); + config.get("database").asText(), + String.join("&", SSL_PARAMETERS)), SQLDialect.MYSQL)) { + database = new Database(dslContext); + + database.query(ctx -> { + ctx.fetch("CREATE DATABASE " + config.get("database").asText()); + ctx.fetch("SHOW STATUS LIKE 'Ssl_cipher'"); + return null; + }); + } super.setup(); } diff --git a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlStressTest.java b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlStressTest.java index a33e9db6a725e..645c0f57ada56 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlStressTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlStressTest.java @@ -10,13 +10,15 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; import io.airbyte.integrations.source.jdbc.test.JdbcStressTest; import java.sql.Connection; import java.sql.DriverManager; import java.util.Optional; import java.util.concurrent.Callable; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; @@ -34,6 +36,7 @@ class MySqlStressTest extends JdbcStressTest { private JsonNode config; private Database database; + private DSLContext dslContext; @BeforeAll static void init() throws Exception { @@ -57,27 +60,26 @@ public void setup() throws Exception { .put("password", TEST_PASSWORD.call()) .build()); - database = Databases.createDatabase( + dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), + DatabaseDriver.MYSQL.getDriverClassName(), String.format("jdbc:mysql://%s:%s", config.get("host").asText(), - config.get("port").asText()), - MySqlSource.DRIVER_CLASS, - SQLDialect.MYSQL); + config.get("port").asText()), SQLDialect.MYSQL); + database = new Database(dslContext); database.query(ctx -> { ctx.fetch("CREATE DATABASE " + config.get("database").asText()); return null; }); - database.close(); super.setup(); } @AfterEach - void tearDown() throws Exception { - database.close(); + void tearDown() { + dslContext.close(); } @AfterAll diff --git a/airbyte-integrations/connectors/source-oracle-strict-encrypt/build.gradle b/airbyte-integrations/connectors/source-oracle-strict-encrypt/build.gradle index dc40076a4e494..7544d22f5da7e 100644 --- a/airbyte-integrations/connectors/source-oracle-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/source-oracle-strict-encrypt/build.gradle @@ -27,7 +27,7 @@ dependencies { 
   testImplementation project(':airbyte-test-utils')
   testImplementation 'org.apache.commons:commons-lang3:3.11'

-  testImplementation 'org.testcontainers:oracle-xe:1.15.3'
+  testImplementation libs.testcontainers.oracle.xe

   integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test')

diff --git a/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleSourceNneAcceptanceTest.java b/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleSourceNneAcceptanceTest.java
index 720226fffbcf9..895068e01efe5 100644
--- a/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleSourceNneAcceptanceTest.java
+++ b/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleSourceNneAcceptanceTest.java
@@ -11,7 +11,9 @@
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.collect.ImmutableMap;
 import io.airbyte.commons.json.Jsons;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DataSourceFactory;
+import io.airbyte.db.factory.DatabaseDriver;
+import io.airbyte.db.jdbc.DefaultJdbcDatabase;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.db.jdbc.JdbcUtils;
 import java.sql.SQLException;
@@ -31,16 +33,20 @@ public void testEncryption() throws SQLException {
     final String algorithm = clone.get("encryption")
         .get("encryption_algorithm").asText();

-    final JdbcDatabase database = Databases.createJdbcDatabase(clone.get("username").asText(),
-        clone.get("password").asText(),
-        String.format("jdbc:oracle:thin:@//%s:%s/%s",
-            clone.get("host").asText(),
-            clone.get("port").asText(),
-            clone.get("sid").asText()),
-        "oracle.jdbc.driver.OracleDriver",
-        JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED&" +
-            "oracle.net.encryption_types_client=( "
-            + algorithm + " )"));
+    final JdbcDatabase database = new DefaultJdbcDatabase(
+        DataSourceFactory.create(
+            clone.get("username").asText(),
+            clone.get("password").asText(),
+            DatabaseDriver.ORACLE.getDriverClassName(),
+            String.format(DatabaseDriver.ORACLE.getUrlFormatString(),
+                clone.get("host").asText(),
+                clone.get("port").asInt(),
+                clone.get("sid").asText()),
+            JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED&" +
+                "oracle.net.encryption_types_client=( "
+                + algorithm + " )")
+        )
+    );

     final String networkServiceBanner =
         "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)";
@@ -61,15 +67,19 @@ public void testCheckProtocol() throws SQLException {
     final String algorithm = clone.get("encryption")
         .get("encryption_algorithm").asText();

-    final JdbcDatabase database = Databases.createJdbcDatabase(clone.get("username").asText(),
-        clone.get("password").asText(),
-        String.format("jdbc:oracle:thin:@//%s:%s/%s",
-            clone.get("host").asText(),
-            clone.get("port").asText(),
-            clone.get("sid").asText()),
-        "oracle.jdbc.driver.OracleDriver",
-        JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" +
-            "oracle.net.encryption_types_client=( " + algorithm + " )", ";"));
+    final JdbcDatabase database = new DefaultJdbcDatabase(
+        DataSourceFactory.create(
+            clone.get("username").asText(),
+            clone.get("password").asText(),
+            DatabaseDriver.ORACLE.getDriverClassName(),
+            String.format(DatabaseDriver.ORACLE.getUrlFormatString(),
+                clone.get("host").asText(),
+                clone.get("port").asInt(),
+                clone.get("sid").asText()),
+            JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" +
+                "oracle.net.encryption_types_client=( " + algorithm + " )", ";")
+        )
+    );

     final String networkServiceBanner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual";
     final List collect = database.queryJsons(networkServiceBanner);
diff --git a/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleStrictEncryptSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleStrictEncryptSourceAcceptanceTest.java
index 0106bd75a5129..b0b640a5dde29 100644
--- a/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleStrictEncryptSourceAcceptanceTest.java
+++ b/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleStrictEncryptSourceAcceptanceTest.java
@@ -9,7 +9,9 @@
 import com.google.common.collect.Lists;
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.commons.resources.MoreResources;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DataSourceFactory;
+import io.airbyte.db.factory.DatabaseDriver;
+import io.airbyte.db.jdbc.DefaultJdbcDatabase;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.integrations.base.ssh.SshHelpers;
@@ -22,8 +24,10 @@
 import io.airbyte.protocol.models.Field;
 import io.airbyte.protocol.models.JsonSchemaType;
 import io.airbyte.protocol.models.SyncMode;
+import java.io.Closeable;
 import java.util.HashMap;
 import java.util.List;
+import javax.sql.DataSource;

 public class OracleStrictEncryptSourceAcceptanceTest extends SourceAcceptanceTest {

@@ -54,15 +58,19 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc
             .build()))
         .build());

-    final JdbcDatabase database = Databases.createJdbcDatabase(config.get("username").asText(),
+    final DataSource dataSource = DataSourceFactory.create(
+        config.get("username").asText(),
         config.get("password").asText(),
-        String.format("jdbc:oracle:thin:@//%s:%s/%s",
+        DatabaseDriver.ORACLE.getDriverClassName(),
+        String.format(DatabaseDriver.ORACLE.getUrlFormatString(),
            config.get("host").asText(),
-           config.get("port").asText(),
+           config.get("port").asInt(),
            config.get("sid").asText()),
-        "oracle.jdbc.driver.OracleDriver",
         JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" +
-            "oracle.net.encryption_types_client=( 3DES168 )", ";"));
+            "oracle.net.encryption_types_client=( 3DES168 )", ";")
+    );
+
+    final JdbcDatabase database = new DefaultJdbcDatabase(dataSource);

     database.execute(connection -> {
       connection.createStatement().execute("CREATE USER JDBC_SPACE IDENTIFIED BY JDBC_SPACE DEFAULT TABLESPACE USERS QUOTA UNLIMITED ON USERS");
@@ -76,7 +84,9 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc
       connection.createStatement().execute("INSERT INTO jdbc_space.starships (id, name) VALUES (3, 'yamato')");
     });

-    database.close();
+    if(dataSource instanceof Closeable closeable) {
+      closeable.close();
+    }
   }

   @Override
diff --git a/airbyte-integrations/connectors/source-oracle/build.gradle b/airbyte-integrations/connectors/source-oracle/build.gradle
index 7f67eb97119e4..593ede089ed69 100644
--- a/airbyte-integrations/connectors/source-oracle/build.gradle
+++ b/airbyte-integrations/connectors/source-oracle/build.gradle
@@ -26,7 +26,7 @@ dependencies {
   testImplementation project(':airbyte-test-utils')
   testImplementation 'org.apache.commons:commons-lang3:3.11'

-  testImplementation 'org.testcontainers:oracle-xe:1.15.3'
+  testImplementation libs.testcontainers.oracle.xe

   integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test')

diff --git a/airbyte-integrations/connectors/source-oracle/src/main/java/io/airbyte/integrations/source/oracle/OracleSource.java b/airbyte-integrations/connectors/source-oracle/src/main/java/io/airbyte/integrations/source/oracle/OracleSource.java
index acd8e04c6a58b..ad6a0f3cc4af0 100644
--- a/airbyte-integrations/connectors/source-oracle/src/main/java/io/airbyte/integrations/source/oracle/OracleSource.java
+++ b/airbyte-integrations/connectors/source-oracle/src/main/java/io/airbyte/integrations/source/oracle/OracleSource.java
@@ -7,6 +7,7 @@
 import com.fasterxml.jackson.databind.JsonNode;
 import com.google.common.collect.ImmutableMap;
 import io.airbyte.commons.json.Jsons;
+import io.airbyte.db.factory.DatabaseDriver;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig;
@@ -33,7 +34,7 @@ public class OracleSource extends AbstractJdbcSource implements Source

   private static final Logger LOGGER = LoggerFactory.getLogger(OracleSource.class);

-  public static final String DRIVER_CLASS = "oracle.jdbc.OracleDriver";
+  public static final String DRIVER_CLASS = DatabaseDriver.ORACLE.getDriverClassName();

   private List schemas;

diff --git a/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/AbstractSshOracleSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/AbstractSshOracleSourceAcceptanceTest.java
index cb95a60ac9377..a9a193fac7cda 100644
--- a/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/AbstractSshOracleSourceAcceptanceTest.java
+++ b/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/AbstractSshOracleSourceAcceptanceTest.java
@@ -8,7 +8,9 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 import io.airbyte.commons.json.Jsons;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DataSourceFactory;
+import io.airbyte.db.factory.DatabaseDriver;
+import io.airbyte.db.jdbc.DefaultJdbcDatabase;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.integrations.base.ssh.SshBastionContainer;
 import io.airbyte.integrations.base.ssh.SshHelpers;
@@ -46,13 +48,17 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc
   }

   private void populateDatabaseTestData() throws Exception {
-    final JdbcDatabase database = Databases.createJdbcDatabase(config.get("username").asText(),
-        config.get("password").asText(),
-        String.format("jdbc:oracle:thin:@//%s:%s/%s",
-            config.get("host").asText(),
-            config.get("port").asText(),
-            config.get("sid").asText()),
-        "oracle.jdbc.driver.OracleDriver");
+    final JdbcDatabase database = new DefaultJdbcDatabase(
DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.ORACLE.getDriverClassName(), + String.format(DatabaseDriver.ORACLE.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("sid").asText()) + ) + ); database.execute(connection -> { connection.createStatement().execute("CREATE USER JDBC_SPACE IDENTIFIED BY JDBC_SPACE DEFAULT TABLESPACE USERS QUOTA UNLIMITED ON USERS"); diff --git a/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceAcceptanceTest.java index 332b712840b3c..bc686fdc7acdd 100644 --- a/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceAcceptanceTest.java @@ -8,7 +8,9 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.base.ssh.SshHelpers; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; @@ -51,13 +53,17 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc .build())) .build()); - final JdbcDatabase database = Databases.createJdbcDatabase(config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:oracle:thin:@//%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - config.get("sid").asText()), - "oracle.jdbc.driver.OracleDriver"); + final JdbcDatabase database = new DefaultJdbcDatabase( + DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.ORACLE.getDriverClassName(), + String.format(DatabaseDriver.ORACLE.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("sid").asText()) + ) + ); database.execute(connection -> { connection.createStatement().execute("CREATE USER JDBC_SPACE IDENTIFIED BY JDBC_SPACE DEFAULT TABLESPACE USERS QUOTA UNLIMITED ON USERS"); diff --git a/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceDatatypeTest.java b/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceDatatypeTest.java index b655321dc7378..a3aa297d43664 100644 --- a/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceDatatypeTest.java @@ -8,7 +8,8 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.standardtest.source.AbstractSourceDatabaseTypeTest; import 
io.airbyte.integrations.standardtest.source.TestDataHolder; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; @@ -21,6 +22,7 @@ import java.util.Date; import java.util.List; import java.util.TimeZone; +import org.jooq.DSLContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testcontainers.containers.OracleContainer; @@ -46,12 +48,15 @@ protected Database setupDatabase() throws Exception { .put("schemas", List.of("TEST")) .build()); - final Database database = Databases.createOracleDatabase(config.get("username").asText(), + final DSLContext dslContext = DSLContextFactory.create( + config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:oracle:thin:@//%s:%s/%s", + DatabaseDriver.ORACLE.getDriverClassName(), + String.format(DatabaseDriver.ORACLE.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("sid").asText())); + config.get("port").asInt(), + config.get("sid").asText()), null); + final Database database = new Database(dslContext); LOGGER.warn("config: " + config); database.query(ctx -> ctx.fetch("CREATE USER test IDENTIFIED BY test DEFAULT TABLESPACE USERS QUOTA UNLIMITED ON USERS")); diff --git a/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceNneAcceptanceTest.java b/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceNneAcceptanceTest.java index aaabf381fcf1a..cc0e0842f5ad5 100644 --- a/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceNneAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceNneAcceptanceTest.java @@ -11,7 +11,9 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; import java.sql.SQLException; @@ -31,16 +33,20 @@ public void testEncrytion() throws SQLException { final String algorithm = clone.get("encryption") .get("encryption_algorithm").asText(); - final JdbcDatabase database = Databases.createJdbcDatabase(clone.get("username").asText(), - clone.get("password").asText(), - String.format("jdbc:oracle:thin:@//%s:%s/%s", - clone.get("host").asText(), - clone.get("port").asText(), - clone.get("sid").asText()), - "oracle.jdbc.driver.OracleDriver", - JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED&" + - "oracle.net.encryption_types_client=( " - + algorithm + " )")); + final JdbcDatabase database = new DefaultJdbcDatabase( + DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.ORACLE.getDriverClassName(), + String.format("jdbc:oracle:thin:@//%s:%d/%s", + clone.get("host").asText(), + clone.get("port").asInt(), + clone.get("sid").asText()), + JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED&" + + "oracle.net.encryption_types_client=( " + + algorithm + " )") + ) + ); final String networkServiceBanner = "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)"; @@ 
-52,13 +58,17 @@ public void testEncrytion() throws SQLException { @Test public void testNoneEncrytion() throws SQLException { - final JdbcDatabase database = Databases.createJdbcDatabase(config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:oracle:thin:@//%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - config.get("sid").asText()), - "oracle.jdbc.driver.OracleDriver"); + final JdbcDatabase database = new DefaultJdbcDatabase( + DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.ORACLE.getDriverClassName(), + String.format(DatabaseDriver.ORACLE.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("sid").asText()) + ) + ); final String networkServiceBanner = "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)"; @@ -78,16 +88,19 @@ public void testCheckProtocol() throws SQLException { final String algorithm = clone.get("encryption") .get("encryption_algorithm").asText(); - final JdbcDatabase database = Databases.createJdbcDatabase(clone.get("username").asText(), - clone.get("password").asText(), - String.format("jdbc:oracle:thin:@//%s:%s/%s", - clone.get("host").asText(), - clone.get("port").asText(), - clone.get("sid").asText()), - "oracle.jdbc.driver.OracleDriver", - JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED&" + - "oracle.net.encryption_types_client=( " - + algorithm + " )")); + final JdbcDatabase database = new DefaultJdbcDatabase( + DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.ORACLE.getDriverClassName(), + String.format(DatabaseDriver.ORACLE.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("sid").asText()), + JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED&" + + "oracle.net.encryption_types_client=( " + + algorithm + " )") + )); final String networkServiceBanner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual"; final List collect = database.queryJsons(networkServiceBanner); diff --git a/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleSourceTest.java b/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleSourceTest.java index ff86b31c011fd..0b6e91949970a 100644 --- a/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleSourceTest.java +++ b/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleSourceTest.java @@ -12,7 +12,9 @@ import com.google.common.collect.Sets; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.MoreIterators; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.protocol.models.AirbyteMessage; @@ -70,13 +72,17 @@ void setup() throws Exception { .put("schemas", List.of("TEST")) .build()); - final JdbcDatabase database = Databases.createJdbcDatabase(config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:oracle:thin:@//%s:%s/%s", - config.get("host").asText(), - 
config.get("port").asText(), - config.get("sid").asText()), - "oracle.jdbc.driver.OracleDriver"); + final JdbcDatabase database = new DefaultJdbcDatabase( + DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.ORACLE.getDriverClassName(), + String.format(DatabaseDriver.ORACLE.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("sid").asText()) + ) + ); database.execute(connection -> { connection.createStatement().execute("CREATE USER TEST IDENTIFIED BY TEST DEFAULT TABLESPACE USERS QUOTA UNLIMITED ON USERS"); diff --git a/airbyte-integrations/connectors/source-postgres-strict-encrypt/build.gradle b/airbyte-integrations/connectors/source-postgres-strict-encrypt/build.gradle index 6a76c5a7f4295..f75bae6518f32 100644 --- a/airbyte-integrations/connectors/source-postgres-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/source-postgres-strict-encrypt/build.gradle @@ -23,7 +23,7 @@ dependencies { testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation project(':airbyte-test-utils') - testImplementation 'org.testcontainers:postgresql:1.15.3' + testImplementation libs.testcontainers.postgresql integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') diff --git a/airbyte-integrations/connectors/source-postgres-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceStrictEncryptAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceStrictEncryptAcceptanceTest.java index 262f870a2b6d5..47ae986902887 100644 --- a/airbyte-integrations/connectors/source-postgres-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceStrictEncryptAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceStrictEncryptAcceptanceTest.java @@ -10,7 +10,8 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.ssh.SshHelpers; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; @@ -23,6 +24,7 @@ import io.airbyte.protocol.models.JsonSchemaType; import io.airbyte.protocol.models.SyncMode; import java.util.HashMap; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.testcontainers.containers.PostgreSQLContainer; import org.testcontainers.utility.DockerImageName; @@ -52,25 +54,24 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc .put("replication_method", replicationMethod) .build()); - final Database database = Databases.createDatabase( + try (final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), 
- config.get("database").asText()), - "org.postgresql.Driver", - SQLDialect.POSTGRES); - - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - return null; - }); - - database.close(); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.POSTGRES)) { + final Database database = new Database(dslContext); + + database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + return null; + }); + } } @Override diff --git a/airbyte-integrations/connectors/source-postgres/build.gradle b/airbyte-integrations/connectors/source-postgres/build.gradle index 9b70dbf458feb..659190e8fcb30 100644 --- a/airbyte-integrations/connectors/source-postgres/build.gradle +++ b/airbyte-integrations/connectors/source-postgres/build.gradle @@ -19,14 +19,14 @@ dependencies { implementation project(':airbyte-integrations:connectors:source-relational-db') implementation 'org.apache.commons:commons-lang3:3.11' - implementation "org.postgresql:postgresql:42.2.18" + implementation libs.postgresql testImplementation testFixtures(project(':airbyte-integrations:bases:debezium')) testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation project(":airbyte-json-validation") testImplementation project(':airbyte-test-utils') - testImplementation 'org.testcontainers:postgresql:1.15.3' + testImplementation libs.testcontainers.postgresql integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') performanceTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java index 87f6f3bc2b303..0a8ee9ff0b475 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java @@ -17,6 +17,7 @@ import io.airbyte.commons.functional.CheckedFunction; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.AutoCloseableIterator; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.base.IntegrationRunner; @@ -51,7 +52,7 @@ public class PostgresSource extends AbstractJdbcSource implements Sour private static final Logger LOGGER = LoggerFactory.getLogger(PostgresSource.class); public static final String CDC_LSN = "_ab_cdc_lsn"; - static final String DRIVER_CLASS = "org.postgresql.Driver"; + static final String DRIVER_CLASS = DatabaseDriver.POSTGRESQL.getDriverClassName(); private List schemas; public static 
Source sshWrappedSource() { diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshPostgresSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshPostgresSourceAcceptanceTest.java index 4f0c32793084e..583b9e9354c40 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshPostgresSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshPostgresSourceAcceptanceTest.java @@ -8,7 +8,8 @@ import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.ssh.SshBastionContainer; import io.airbyte.integrations.base.ssh.SshHelpers; import io.airbyte.integrations.base.ssh.SshTunnel; @@ -24,6 +25,7 @@ import io.airbyte.protocol.models.SyncMode; import java.util.HashMap; import java.util.List; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.testcontainers.containers.PostgreSQLContainer; @@ -58,25 +60,24 @@ private void initAndStartJdbcContainer() { } private static void populateDatabaseTestData() throws Exception { - final Database database = Databases.createDatabase( + try (final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver", - SQLDialect.POSTGRES); - - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - return null; - }); - - database.close(); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.POSTGRES)) { + final Database database = new Database(dslContext); + + database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + return null; + }); + } } @Override diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceAcceptanceTest.java index 065fe9e587f04..071327e2f41b6 100644 --- 
a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceAcceptanceTest.java @@ -9,7 +9,8 @@ import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.ssh.SshHelpers; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; @@ -23,6 +24,7 @@ import io.airbyte.protocol.models.SyncMode; import java.util.HashMap; import java.util.List; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.testcontainers.containers.PostgreSQLContainer; import org.testcontainers.utility.MountableFile; @@ -73,30 +75,30 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc .put("ssl", false) .build()); - final Database database = Databases.createDatabase( + try (final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver", - SQLDialect.POSTGRES); - /** - * cdc expects the INCREMENTAL tables to contain primary key checkout - * {@link io.airbyte.integrations.source.postgres.PostgresSource#removeIncrementalWithoutPk(AirbyteStream)} - */ - database.query(ctx -> { - ctx.execute("SELECT pg_create_logical_replication_slot('" + SLOT_NAME_BASE + "', 'pgoutput');"); - ctx.execute("CREATE PUBLICATION " + PUBLICATION + " FOR ALL TABLES;"); - ctx.execute("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - ctx.execute("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.execute("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); - ctx.execute("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - return null; - }); - - database.close(); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.POSTGRES)) { + final Database database = new Database(dslContext); + + /** + * cdc expects the INCREMENTAL tables to contain primary key checkout + * {@link io.airbyte.integrations.source.postgres.PostgresSource#removeIncrementalWithoutPk(AirbyteStream)} + */ + database.query(ctx -> { + ctx.execute("SELECT pg_create_logical_replication_slot('" + SLOT_NAME_BASE + "', 'pgoutput');"); + ctx.execute("CREATE PUBLICATION " + PUBLICATION + " FOR ALL TABLES;"); + ctx.execute("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.execute("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.execute("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); + ctx.execute("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + return null; + }); + } } @Override diff --git 
a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceDatatypeTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceDatatypeTest.java index 8b7b239d28819..616eeac51498d 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceDatatypeTest.java @@ -8,12 +8,14 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.standardtest.source.AbstractSourceDatabaseTypeTest; import io.airbyte.integrations.standardtest.source.TestDataHolder; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; import io.airbyte.protocol.models.JsonSchemaType; import java.util.List; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.testcontainers.containers.PostgreSQLContainer; import org.testcontainers.utility.MountableFile; @@ -56,15 +58,15 @@ protected Database setupDatabase() throws Exception { .put("ssl", false) .build()); - final Database database = Databases.createDatabase( + final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver", - SQLDialect.POSTGRES); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.POSTGRES); + final Database database = new Database(dslContext); database.query(ctx -> { ctx.execute("SELECT pg_create_logical_replication_slot('" + SLOT_NAME_BASE + "', 'pgoutput');"); diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceAcceptanceTest.java index 01d1bafbaf1ff..268d8198ded8f 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceAcceptanceTest.java @@ -9,7 +9,8 @@ import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.ssh.SshHelpers; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; @@ -23,6 +24,7 @@ import 
io.airbyte.protocol.models.SyncMode; import java.util.HashMap; import java.util.List; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.testcontainers.containers.PostgreSQLContainer; @@ -53,26 +55,25 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc .put("replication_method", replicationMethod) .build()); - final Database database = Databases.createDatabase( + try (final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver", - SQLDialect.POSTGRES); - - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - ctx.fetch("CREATE MATERIALIZED VIEW testview AS select * from id_and_name where id = '2';"); - return null; - }); - - database.close(); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.POSTGRES)) { + final Database database = new Database(dslContext); + + database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + ctx.fetch("CREATE MATERIALIZED VIEW testview AS select * from id_and_name where id = '2';"); + return null; + }); + } } @Override diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceDatatypeTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceDatatypeTest.java index 25ebd308cbb83..1a1df3e7f8511 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceDatatypeTest.java @@ -8,13 +8,15 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.standardtest.source.AbstractSourceDatabaseTypeTest; import io.airbyte.integrations.standardtest.source.TestDataHolder; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; import io.airbyte.protocol.models.JsonSchemaType; import java.sql.SQLException; import java.util.Set; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.testcontainers.containers.PostgreSQLContainer; @@ -41,15 +43,15 @@ protected Database setupDatabase() throws SQLException { 
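+    // A minimal sketch of the factory-based wiring introduced below, assuming the
+    // caller (or the test teardown) closes the DSLContext when it is done:
+    //
+    //   final DSLContext dslContext = DSLContextFactory.create(username, password,
+    //       DatabaseDriver.POSTGRESQL.getDriverClassName(), jdbcUrl, SQLDialect.POSTGRES);
+    //   final Database database = new Database(dslContext);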
.put("replication_method", replicationMethod) .build()); - final Database database = Databases.createDatabase( + final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver", - SQLDialect.POSTGRES); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.POSTGRES); + final Database database = new Database(dslContext); database.query(ctx -> { ctx.execute(String.format("CREATE SCHEMA %S;", SCHEMA_NAME)); diff --git a/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/FillPostgresTestDbScriptTest.java b/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/FillPostgresTestDbScriptTest.java index 19caac05a6224..ab16afce18ec5 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/FillPostgresTestDbScriptTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/FillPostgresTestDbScriptTest.java @@ -8,10 +8,12 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; import io.airbyte.integrations.standardtest.source.performancetest.AbstractSourceFillDbWithTestData; import java.util.stream.Stream; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.junit.jupiter.params.provider.Arguments; @@ -47,15 +49,15 @@ protected Database setupDatabase(final String dbName) throws Exception { .put("replication_method", replicationMethod) .build()); - final Database database = Databases.createDatabase( + final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - dbName), - "org.postgresql.Driver", - SQLDialect.POSTGRES); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.POSTGRES); + final Database database = new Database(dslContext); return database; } diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java index ac4c0c98b9a60..1d5079a54fa83 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java @@ -21,8 +21,11 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; import io.airbyte.db.Database; -import io.airbyte.db.Databases; import io.airbyte.db.PgLsn; 
+import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.debezium.CdcSourceTest; @@ -33,6 +36,7 @@ import io.airbyte.test.utils.PostgreSQLContainerHelper; import java.sql.SQLException; import java.util.List; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; @@ -49,6 +53,7 @@ abstract class CdcPostgresSourceTest extends CdcSourceTest { private String dbName; private Database database; + private DSLContext dslContext; private PostgresSource source; private JsonNode config; @@ -56,7 +61,7 @@ abstract class CdcPostgresSourceTest extends CdcSourceTest { @AfterEach void tearDown() throws Exception { - database.close(); + dslContext.close(); container.close(); } @@ -76,7 +81,8 @@ protected void setup() throws SQLException { config = getConfig(dbName); final String fullReplicationSlot = SLOT_NAME_BASE + "_" + dbName; - database = getDatabaseFromConfig(config); + dslContext = getDslContext(config); + database = getDatabase(dslContext); database.query(ctx -> { ctx.execute("SELECT pg_create_logical_replication_slot('" + fullReplicationSlot + "', '" + getPluginName() + "');"); ctx.execute("CREATE PUBLICATION " + PUBLICATION + " FOR ALL TABLES;"); @@ -106,16 +112,19 @@ private JsonNode getConfig(final String dbName) { .build()); } - private Database getDatabaseFromConfig(final JsonNode config) { - return Databases.createDatabase( + private static Database getDatabase(final DSLContext dslContext) { + return new Database(dslContext); + } + + private static DSLContext getDslContext(final JsonNode config) { + return DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver", - SQLDialect.POSTGRES); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.POSTGRES); } @Test @@ -161,14 +170,18 @@ protected void assertExpectedStateMessages(final List state @Override protected CdcTargetPosition cdcLatestTargetPosition() { - final JdbcDatabase database = Databases.createJdbcDatabase( - config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver"); + final JdbcDatabase database = new DefaultJdbcDatabase( + DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("database").asText()) + ) + ); + return PostgresCdcTargetPosition.targetPosition(database); } diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceSSLTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceSSLTest.java index 54021a83f236f..d93ab1369792b 
100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceSSLTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceSSLTest.java @@ -21,7 +21,9 @@ import io.airbyte.commons.string.Strings; import io.airbyte.commons.util.MoreIterators; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteStream; @@ -36,6 +38,8 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import javax.sql.DataSource; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -99,33 +103,40 @@ void setup() throws Exception { PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB); final JsonNode config = getConfig(PSQL_DB, dbName); - final Database database = getDatabaseFromConfig(config); - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id NUMERIC(20, 10), name VARCHAR(200), power double precision, PRIMARY KEY (id));"); - ctx.fetch("CREATE INDEX i1 ON id_and_name (id);"); - ctx.fetch("INSERT INTO id_and_name (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); - - ctx.fetch("CREATE TABLE id_and_name2(id NUMERIC(20, 10), name VARCHAR(200), power double precision);"); - ctx.fetch("INSERT INTO id_and_name2 (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); - - ctx.fetch("CREATE TABLE names(first_name VARCHAR(200), last_name VARCHAR(200), power double precision, PRIMARY KEY (first_name, last_name));"); - ctx.fetch( - "INSERT INTO names (first_name, last_name, power) VALUES ('san', 'goku', 'Infinity'), ('prince', 'vegeta', 9000.1), ('piccolo', 'junior', '-Infinity');"); - return null; - }); - database.close(); + try (final DSLContext dslContext = getDslContext(config)) { + final Database database = getDatabase(dslContext); + database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name(id NUMERIC(20, 10), name VARCHAR(200), power double precision, PRIMARY KEY (id));"); + ctx.fetch("CREATE INDEX i1 ON id_and_name (id);"); + ctx.fetch( + "INSERT INTO id_and_name (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); + + ctx.fetch("CREATE TABLE id_and_name2(id NUMERIC(20, 10), name VARCHAR(200), power double precision);"); + ctx.fetch( + "INSERT INTO id_and_name2 (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); + + ctx.fetch( + "CREATE TABLE names(first_name VARCHAR(200), last_name VARCHAR(200), power double precision, PRIMARY KEY (first_name, last_name));"); + ctx.fetch( + "INSERT INTO names (first_name, last_name, power) VALUES ('san', 'goku', 'Infinity'), ('prince', 'vegeta', 9000.1), ('piccolo', 'junior', '-Infinity');"); + return null; + }); + } + } + + private static Database getDatabase(final DSLContext dslContext) { + return new Database(dslContext); } - private Database getDatabaseFromConfig(final JsonNode config) { - return Databases.createDatabase( + private static DSLContext getDslContext(final 
JsonNode config) {
+    return DSLContextFactory.create(
         config.get("username").asText(),
         config.get("password").asText(),
-        String.format("jdbc:postgresql://%s:%s/%s?sslmode=require",
+        DatabaseDriver.POSTGRESQL.getDriverClassName(),
+        String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString() + "?sslmode=require",
             config.get("host").asText(),
-            config.get("port").asText(),
-            config.get("database").asText()),
-        "org.postgresql.Driver",
-        SQLDialect.POSTGRES);
+            config.get("port").asInt(),
+            config.get("database").asText()), SQLDialect.POSTGRES);
   }
 
   private JsonNode getConfig(final PostgreSQLContainer psqlDb, final String dbName) {
diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceTest.java
index b8b46b1b37aec..8cf536879edab 100644
--- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceTest.java
+++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceTest.java
@@ -21,7 +21,8 @@
 import io.airbyte.commons.string.Strings;
 import io.airbyte.commons.util.MoreIterators;
 import io.airbyte.db.Database;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DSLContextFactory;
+import io.airbyte.db.factory.DatabaseDriver;
 import io.airbyte.protocol.models.AirbyteCatalog;
 import io.airbyte.protocol.models.AirbyteMessage;
 import io.airbyte.protocol.models.AirbyteStream;
@@ -36,7 +37,8 @@
 import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
+import org.jooq.DSLContext;
 import org.jooq.SQLDialect;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.BeforeEach;
@@ -125,45 +129,52 @@ void setup() throws Exception {
     PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB);
 
     final JsonNode config = getConfig(PSQL_DB, dbName);
-    final Database database = getDatabaseFromConfig(config);
-    database.query(ctx -> {
-      ctx.fetch("CREATE TABLE id_and_name(id NUMERIC(20, 10), name VARCHAR(200), power double precision, PRIMARY KEY (id));");
-      ctx.fetch("CREATE INDEX i1 ON id_and_name (id);");
-      ctx.fetch("INSERT INTO id_and_name (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');");
-
-      ctx.fetch("CREATE TABLE id_and_name2(id NUMERIC(20, 10), name VARCHAR(200), power double precision);");
-      ctx.fetch("INSERT INTO id_and_name2 (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');");
-
-      ctx.fetch("CREATE TABLE names(first_name VARCHAR(200), last_name VARCHAR(200), power double precision, PRIMARY KEY (first_name, last_name));");
-      ctx.fetch(
-          "INSERT INTO names (first_name, last_name, power) VALUES ('san', 'goku', 'Infinity'), ('prince', 'vegeta', 9000.1), ('piccolo', 'junior', '-Infinity');");
-      return null;
-    });
-    database.close();
+
+    try (final DSLContext dslContext = getDslContext(config)) {
+      final Database database = getDatabase(dslContext);
+      database.query(ctx -> {
+        ctx.fetch("CREATE TABLE id_and_name(id NUMERIC(20, 10), name VARCHAR(200), power double precision, PRIMARY KEY (id));");
+        ctx.fetch("CREATE INDEX i1 ON id_and_name (id);");
+        ctx.fetch(
            "INSERT INTO id_and_name (id, name, power) VALUES (1,'goku', 
'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); + + ctx.fetch("CREATE TABLE id_and_name2(id NUMERIC(20, 10), name VARCHAR(200), power double precision);"); + ctx.fetch( + "INSERT INTO id_and_name2 (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); + + ctx.fetch( + "CREATE TABLE names(first_name VARCHAR(200), last_name VARCHAR(200), power double precision, PRIMARY KEY (first_name, last_name));"); + ctx.fetch( + "INSERT INTO names (first_name, last_name, power) VALUES ('san', 'goku', 'Infinity'), ('prince', 'vegeta', 9000.1), ('piccolo', 'junior', '-Infinity');"); + return null; + }); + } } - private static Database getDatabaseWithSpecifiedUser(final JsonNode config, final String username, final String password) { - return Databases.createDatabase( + private static DSLContext getDslContextWithSpecifiedUser(final JsonNode config, final String username, final String password) { + return DSLContextFactory.create( username, password, - String.format("jdbc:postgresql://%s:%s/%s", + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver", - SQLDialect.POSTGRES); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.POSTGRES); + } + + private static Database getDatabase(final DSLContext dslContext) { + return new Database(dslContext); } - private static Database getDatabaseFromConfig(final JsonNode config) { - return Databases.createDatabase( + private static DSLContext getDslContext(final JsonNode config) { + return DSLContextFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver", - SQLDialect.POSTGRES); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.POSTGRES); } private JsonNode getConfig(final PostgreSQLContainer psqlDb, final String dbName) { @@ -210,7 +221,8 @@ public void testCanReadUtf8() throws Exception { try (final PostgreSQLContainer db = new PostgreSQLContainer<>("postgres:13-alpine").withCommand("postgres -c client_encoding=sql_ascii")) { db.start(); final JsonNode config = getConfig(db); - try (final Database database = getDatabaseFromConfig(config)) { + try (final DSLContext dslContext = getDslContext(config)) { + final Database database = getDatabase(dslContext); database.query(ctx -> { ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,E'\\u2013 someutfstring'), (2, E'\\u2215');"); @@ -231,7 +243,8 @@ void testUserDoesntHasPrivilegesToSelectTable() throws Exception { try (final PostgreSQLContainer db = new PostgreSQLContainer<>("postgres:13-alpine")) { db.start(); final JsonNode config = getConfig(db); - try (final Database database = getDatabaseFromConfig(config)) { + try (final DSLContext dslContext = getDslContext(config)) { + final Database database = new Database(dslContext); database.query(ctx -> { ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'John'), (2, 'Alfred'), (3, 'Alex');"); @@ -241,7 +254,8 @@ void 
testUserDoesntHasPrivilegesToSelectTable() throws Exception { return null; }); } - try (final Database database = getDatabaseWithSpecifiedUser(config, "test_user_3", "132")) { + try (final DSLContext dslContext = getDslContextWithSpecifiedUser(config, "test_user_3", "132")) { + final Database database = new Database(dslContext); database.query(ctx -> { ctx.fetch("CREATE TABLE id_and_name_3(id INTEGER, name VARCHAR(200));"); ctx.fetch("CREATE VIEW id_and_name_3_view(id, name) as\n" @@ -279,7 +293,8 @@ void testDiscoverRecursiveRolePermissions() throws Exception { try (final PostgreSQLContainer db = new PostgreSQLContainer<>("postgres:13-alpine")) { db.start(); final JsonNode config = getConfig(db); - try (final Database database = getDatabaseFromConfig(config)) { + try (final DSLContext dslContext = getDslContext(config)) { + final Database database = new Database(dslContext); database.query(ctx -> { ctx.fetch("CREATE TABLE id_and_name_7(id INTEGER, name VARCHAR(200));"); ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); @@ -303,7 +318,8 @@ void testDiscoverRecursiveRolePermissions() throws Exception { return null; }); } - try (final Database database = getDatabaseWithSpecifiedUser(config, "test_user_4", "132")) { + try (final DSLContext dslContext = getDslContextWithSpecifiedUser(config, "test_user_4", "132")) { + final Database database = new Database(dslContext); database.query(ctx -> { ctx.fetch("CREATE TABLE id_and_name_3(id INTEGER, name VARCHAR(200));"); return null; diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresStressTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresStressTest.java index 75773468b5776..f7e60e1555970 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresStressTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresStressTest.java @@ -9,6 +9,7 @@ import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.base.IntegrationRunner; @@ -95,7 +96,7 @@ private static class PostgresTestSource extends AbstractJdbcSource imp private static final Logger LOGGER = LoggerFactory.getLogger(PostgresTestSource.class); - static final String DRIVER_CLASS = "org.postgresql.Driver"; + static final String DRIVER_CLASS = DatabaseDriver.POSTGRESQL.getDriverClassName(); public PostgresTestSource() { super(DRIVER_CLASS, AdaptiveStreamingQueryConfig::new, JdbcUtils.getDefaultSourceOperations()); @@ -105,9 +106,9 @@ public PostgresTestSource() { public JsonNode toDatabaseConfig(final JsonNode config) { final ImmutableMap.Builder configBuilder = ImmutableMap.builder() .put("username", config.get("username").asText()) - .put("jdbc_url", String.format("jdbc:postgresql://%s:%s/%s", + .put("jdbc_url", String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), + config.get("port").asInt(), config.get("database").asText())); if (config.has("password")) { diff --git a/airbyte-integrations/connectors/source-redshift/src/main/java/io/airbyte/integrations/source/redshift/RedshiftSource.java 
b/airbyte-integrations/connectors/source-redshift/src/main/java/io/airbyte/integrations/source/redshift/RedshiftSource.java index b81e77dd71a1a..e5340d130f659 100644 --- a/airbyte-integrations/connectors/source-redshift/src/main/java/io/airbyte/integrations/source/redshift/RedshiftSource.java +++ b/airbyte-integrations/connectors/source-redshift/src/main/java/io/airbyte/integrations/source/redshift/RedshiftSource.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; @@ -29,7 +30,7 @@ public class RedshiftSource extends AbstractJdbcSource implements Source { private static final Logger LOGGER = LoggerFactory.getLogger(RedshiftSource.class); - public static final String DRIVER_CLASS = "com.amazon.redshift.jdbc.Driver"; + public static final String DRIVER_CLASS = DatabaseDriver.REDSHIFT.getDriverClassName(); private static final String SCHEMAS = "schemas"; private List schemas; @@ -45,9 +46,9 @@ public JsonNode toDatabaseConfig(final JsonNode redshiftConfig) { final ImmutableMap.Builder builder = ImmutableMap.builder() .put("username", redshiftConfig.get("username").asText()) .put("password", redshiftConfig.get("password").asText()) - .put("jdbc_url", String.format("jdbc:redshift://%s:%s/%s", + .put("jdbc_url", String.format(DatabaseDriver.REDSHIFT.getUrlFormatString(), redshiftConfig.get("host").asText(), - redshiftConfig.get("port").asText(), + redshiftConfig.get("port").asInt(), redshiftConfig.get("database").asText())); if (redshiftConfig.has(SCHEMAS) && redshiftConfig.get(SCHEMAS).isArray()) { diff --git a/airbyte-integrations/connectors/source-redshift/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/RedshiftSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-redshift/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/RedshiftSourceAcceptanceTest.java index 59a7b01407f2a..69111c7b9e2c5 100644 --- a/airbyte-integrations/connectors/source-redshift/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/RedshiftSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-redshift/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/RedshiftSourceAcceptanceTest.java @@ -12,7 +12,9 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; import io.airbyte.commons.string.Strings; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.source.redshift.RedshiftSource; @@ -79,14 +81,17 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc } protected JdbcDatabase createDatabase(final JsonNode config) { - return Databases.createJdbcDatabase( - config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:redshift://%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - RedshiftSource.DRIVER_CLASS); + return new DefaultJdbcDatabase( + DataSourceFactory.create( + 
config.get("username").asText(), + config.get("password").asText(), + RedshiftSource.DRIVER_CLASS, + String.format(DatabaseDriver.REDSHIFT.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("database").asText()) + ) + ); } protected void createTestUser(final JdbcDatabase database, final JsonNode config, final String testUserName, final String testUserPassword) diff --git a/airbyte-integrations/connectors/source-redshift/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/RedshiftSslSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-redshift/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/RedshiftSslSourceAcceptanceTest.java index a6f40182ee9cd..dca9f744716d5 100644 --- a/airbyte-integrations/connectors/source-redshift/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/RedshiftSslSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-redshift/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/RedshiftSslSourceAcceptanceTest.java @@ -5,25 +5,29 @@ package io.airbyte.integrations.io.airbyte.integration_tests.sources; import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; -import io.airbyte.integrations.source.redshift.RedshiftSource; public class RedshiftSslSourceAcceptanceTest extends RedshiftSourceAcceptanceTest { @Override protected JdbcDatabase createDatabase(final JsonNode config) { - return Databases.createJdbcDatabase( - config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:redshift://%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - RedshiftSource.DRIVER_CLASS, - JdbcUtils.parseJdbcParameters("ssl=true&" + - "sslfactory=com.amazon.redshift.ssl.NonValidatingFactory")); + return new DefaultJdbcDatabase( + DataSourceFactory.create( + config.get("username").asText(), + config.get("password").asText(), + DatabaseDriver.REDSHIFT.getDriverClassName(), + String.format(DatabaseDriver.REDSHIFT.getUrlFormatString(), + config.get("host").asText(), + config.get("port").asInt(), + config.get("database").asText()), + JdbcUtils.parseJdbcParameters("ssl=true&" + + "sslfactory=com.amazon.redshift.ssl.NonValidatingFactory") + ) + ); } } diff --git a/airbyte-integrations/connectors/source-relational-db/build.gradle b/airbyte-integrations/connectors/source-relational-db/build.gradle index 958e6e7dd9b77..acbf9bd2cbc4a 100644 --- a/airbyte-integrations/connectors/source-relational-db/build.gradle +++ b/airbyte-integrations/connectors/source-relational-db/build.gradle @@ -16,8 +16,8 @@ dependencies { testImplementation project(':airbyte-test-utils') - testImplementation "org.postgresql:postgresql:42.2.18" - testImplementation "org.testcontainers:postgresql:1.15.3" + testImplementation libs.postgresql + testImplementation libs.testcontainers.postgresql implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) } diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java 
b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java index a22abe03a10b9..0163044b23361 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java +++ b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java @@ -11,6 +11,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.StreamingJdbcDatabase; import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; @@ -30,7 +31,7 @@ public class SnowflakeSource extends AbstractJdbcSource implements Source { private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeSource.class); - public static final String DRIVER_CLASS = "net.snowflake.client.jdbc.SnowflakeDriver"; + public static final String DRIVER_CLASS = DatabaseDriver.SNOWFLAKE.getDriverClassName(); public static final ScheduledExecutorService SCHEDULED_EXECUTOR_SERVICE = Executors.newScheduledThreadPool(1); public SnowflakeSource() { diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAcceptanceTest.java index 9b676083f03e9..4d4c8a5b74268 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAcceptanceTest.java @@ -12,7 +12,9 @@ import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.source.snowflake.SnowflakeSource; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; @@ -126,15 +128,17 @@ protected void tearDown(final TestDestinationEnv testEnv) throws Exception { protected JdbcDatabase setupDataBase() { config = Jsons.clone(getStaticConfig()); - return Databases.createJdbcDatabase( - config.get("credentials").get("username").asText(), - config.get("credentials").get("password").asText(), - String.format("jdbc:snowflake://%s/", - config.get("host").asText()), - SnowflakeSource.DRIVER_CLASS, - Map.of("role", config.get("role").asText(), - "warehouse", config.get("warehouse").asText(), - "database", config.get("database").asText())); + return new DefaultJdbcDatabase( + DataSourceFactory.create( + config.get("credentials").get("username").asText(), + config.get("credentials").get("password").asText(), + SnowflakeSource.DRIVER_CLASS, + String.format(DatabaseDriver.SNOWFLAKE.getUrlFormatString(), config.get("host").asText()), + Map.of("role", config.get("role").asText(), + "warehouse", config.get("warehouse").asText(), + "database", config.get("database").asText()) + ) + ); } 
@Test diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceDatatypeTest.java b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceDatatypeTest.java index df49c9884d3fa..05a581e841728 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceDatatypeTest.java @@ -8,7 +8,8 @@ import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.source.snowflake.SnowflakeSource; import io.airbyte.integrations.standardtest.source.AbstractSourceDatabaseTypeTest; import io.airbyte.integrations.standardtest.source.TestDataHolder; @@ -17,7 +18,9 @@ import java.nio.file.Path; import java.util.Map; import org.apache.commons.lang3.RandomStringUtils; +import org.jooq.DSLContext; import org.jooq.SQLDialect; +import org.junit.jupiter.api.BeforeEach; public class SnowflakeSourceDatatypeTest extends AbstractSourceDatabaseTypeTest { @@ -27,6 +30,7 @@ public class SnowflakeSourceDatatypeTest extends AbstractSourceDatabaseTypeTest private JsonNode config; private Database database; + private DSLContext dslContext; @Override protected String getImageName() { @@ -50,12 +54,17 @@ protected Database setupDatabase() throws Exception { } private Database getDatabase() { - return Databases.createDatabase( + return new Database(dslContext); + } + + @Override + protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { + super.setupEnvironment(environment); + dslContext = DSLContextFactory.create( config.get("credentials").get("username").asText(), config.get("credentials").get("password").asText(), - String.format("jdbc:snowflake://%s/", - config.get("host").asText()), SnowflakeSource.DRIVER_CLASS, + String.format(DatabaseDriver.SNOWFLAKE.getUrlFormatString(), config.get("host").asText()), SQLDialect.DEFAULT, Map.of( "role", config.get("role").asText(), @@ -69,7 +78,7 @@ protected void tearDown(final TestDestinationEnv testEnv) throws Exception { .format("DROP SCHEMA IF EXISTS %s", SCHEMA_NAME); database = getDatabase(); database.query(ctx -> ctx.fetch(dropSchemaQuery)); - database.close(); + dslContext.close(); } @Override diff --git a/airbyte-integrations/connectors/source-tidb/build.gradle b/airbyte-integrations/connectors/source-tidb/build.gradle index efcb2bf76b0d5..91f88f58cde3d 100755 --- a/airbyte-integrations/connectors/source-tidb/build.gradle +++ b/airbyte-integrations/connectors/source-tidb/build.gradle @@ -20,7 +20,7 @@ dependencies { implementation 'mysql:mysql-connector-java:8.0.22' // Add testcontainers and use GenericContainer for TiDB - implementation "org.testcontainers:testcontainers:1.16.3" + implementation libs.testcontainers testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) @@ -29,7 +29,7 @@ dependencies { integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-tidb') integrationTestJavaImplementation 
project(':airbyte-integrations:bases:standard-source-test') - integrationTestJavaImplementation "org.testcontainers:testcontainers:1.16.3" + integrationTestJavaImplementation libs.testcontainers implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) diff --git a/airbyte-integrations/connectors/source-tidb/src/main/java/io/airbyte/integrations/source/tidb/TiDBSource.java b/airbyte-integrations/connectors/source-tidb/src/main/java/io/airbyte/integrations/source/tidb/TiDBSource.java index f10efc8b6e163..c74df03c7a09c 100644 --- a/airbyte-integrations/connectors/source-tidb/src/main/java/io/airbyte/integrations/source/tidb/TiDBSource.java +++ b/airbyte-integrations/connectors/source-tidb/src/main/java/io/airbyte/integrations/source/tidb/TiDBSource.java @@ -8,6 +8,7 @@ import com.google.common.collect.ImmutableMap; import com.mysql.cj.MysqlType; import io.airbyte.commons.json.Jsons; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.Source; @@ -22,7 +23,7 @@ public class TiDBSource extends AbstractJdbcSource implements Source private static final Logger LOGGER = LoggerFactory.getLogger(TiDBSource.class); - static final String DRIVER_CLASS = "com.mysql.cj.jdbc.Driver"; + static final String DRIVER_CLASS = DatabaseDriver.MYSQL.getDriverClassName(); public static final List SSL_PARAMETERS = List.of( "useSSL=true", "requireSSL=true", @@ -38,9 +39,9 @@ public TiDBSource() { @Override public JsonNode toDatabaseConfig(final JsonNode config) { - final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:mysql://%s:%s/%s", + final StringBuilder jdbcUrl = new StringBuilder(String.format(DatabaseDriver.MYSQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), + config.get("port").asInt(), config.get("database").asText())); if (config.get("jdbc_url_params") != null diff --git a/airbyte-integrations/connectors/source-tidb/src/test-integration/java/io/airbyte/integrations/source/tidb/TiDBSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-tidb/src/test-integration/java/io/airbyte/integrations/source/tidb/TiDBSourceAcceptanceTest.java index 7dd89937de5d8..834e55177efe8 100755 --- a/airbyte-integrations/connectors/source-tidb/src/test-integration/java/io/airbyte/integrations/source/tidb/TiDBSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-tidb/src/test-integration/java/io/airbyte/integrations/source/tidb/TiDBSourceAcceptanceTest.java @@ -9,12 +9,21 @@ import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.ssh.SshHelpers; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.protocol.models.*; +import io.airbyte.protocol.models.CatalogHelpers; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.ConnectorSpecification; +import io.airbyte.protocol.models.DestinationSyncMode; +import io.airbyte.protocol.models.Field; 
+import io.airbyte.protocol.models.JsonSchemaType; +import io.airbyte.protocol.models.SyncMode; import java.util.HashMap; +import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.testcontainers.containers.GenericContainer; import org.testcontainers.utility.DockerImageName; @@ -39,25 +48,25 @@ protected void setupEnvironment(final TestDestinationEnv testEnv) throws Excepti .put("username", "root") .put("database", "test") .build()); - final Database database = Databases.createDatabase( + + try (final DSLContext dslContext = DSLContextFactory.create( config.get("username").asText(), "", - String.format("jdbc:mysql://%s:%s/%s", + DatabaseDriver.MYSQL.getDriverClassName(), + String.format(DatabaseDriver.MYSQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "com.mysql.cj.jdbc.Driver", - SQLDialect.MYSQL); - - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - return null; - }); - - database.close(); + config.get("port").asInt(), + config.get("database").asText()), SQLDialect.MYSQL)) { + final Database database = new Database(dslContext); + + database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + return null; + }); + } } @Override diff --git a/airbyte-metrics/lib/build.gradle b/airbyte-metrics/lib/build.gradle index fddad3b6e31ee..56a14d773707e 100644 --- a/airbyte-metrics/lib/build.gradle +++ b/airbyte-metrics/lib/build.gradle @@ -11,5 +11,6 @@ dependencies { implementation 'com.datadoghq:java-dogstatsd-client:4.0.0' testImplementation project(':airbyte-config:persistence') - testImplementation 'org.testcontainers:postgresql:1.15.3' + testImplementation project(':airbyte-test-utils') + testImplementation libs.testcontainers.postgresql } diff --git a/airbyte-metrics/lib/src/test/java/io/airbyte/metrics/lib/MetrisQueriesTest.java b/airbyte-metrics/lib/src/test/java/io/airbyte/metrics/lib/MetricsQueriesTest.java similarity index 97% rename from airbyte-metrics/lib/src/test/java/io/airbyte/metrics/lib/MetrisQueriesTest.java rename to airbyte-metrics/lib/src/test/java/io/airbyte/metrics/lib/MetricsQueriesTest.java index 0dd5496868bd6..46bd59b185956 100644 --- a/airbyte-metrics/lib/src/test/java/io/airbyte/metrics/lib/MetrisQueriesTest.java +++ b/airbyte-metrics/lib/src/test/java/io/airbyte/metrics/lib/MetricsQueriesTest.java @@ -13,25 +13,30 @@ import static io.airbyte.db.instance.configs.jooq.Tables.ACTOR_DEFINITION; import static io.airbyte.db.instance.configs.jooq.Tables.CONNECTION; import static io.airbyte.db.instance.configs.jooq.Tables.WORKSPACE; -import static io.airbyte.db.instance.jobs.jooq.Tables.*; +import static io.airbyte.db.instance.jobs.jooq.Tables.JOBS; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import io.airbyte.db.Database; +import io.airbyte.db.factory.DSLContextFactory; import 
io.airbyte.db.instance.configs.jooq.enums.ActorType; import io.airbyte.db.instance.configs.jooq.enums.NamespaceDefinitionType; import io.airbyte.db.instance.configs.jooq.enums.ReleaseStage; import io.airbyte.db.instance.configs.jooq.enums.StatusType; import io.airbyte.db.instance.jobs.jooq.enums.JobStatus; import io.airbyte.db.instance.test.TestDatabaseProviders; +import io.airbyte.test.utils.DatabaseConnectionHelper; import java.io.IOException; import java.sql.SQLException; import java.time.OffsetDateTime; import java.time.temporal.ChronoUnit; import java.util.List; import java.util.UUID; +import javax.sql.DataSource; import org.apache.commons.lang3.tuple.ImmutablePair; +import org.jooq.DSLContext; import org.jooq.JSONB; +import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.DisplayName; @@ -39,7 +44,7 @@ import org.junit.jupiter.api.Test; import org.testcontainers.containers.PostgreSQLContainer; -public class MetrisQueriesTest { +public class MetricsQueriesTest { private static final String USER = "user"; private static final String PASS = "hunter2"; @@ -51,12 +56,14 @@ public class MetrisQueriesTest { @BeforeAll static void setUpAll() throws IOException, SQLException { - PostgreSQLContainer container = new PostgreSQLContainer<>("postgres:13-alpine") + final PostgreSQLContainer container = new PostgreSQLContainer<>("postgres:13-alpine") .withUsername(USER) .withPassword(PASS); container.start(); - final TestDatabaseProviders databaseProviders = new TestDatabaseProviders(container); + final DataSource dataSource = DatabaseConnectionHelper.createDataSource(container); + final DSLContext dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); + final TestDatabaseProviders databaseProviders = new TestDatabaseProviders(dataSource, dslContext); configDb = databaseProviders.createNewConfigsDatabase(); databaseProviders.createNewJobsDatabase(); @@ -332,7 +339,7 @@ void tearDown() throws SQLException { @Test @DisplayName("should return only connections per workspace") void shouldReturnNumConnectionsBasic() throws SQLException { - var workspaceId = UUID.randomUUID(); + final var workspaceId = UUID.randomUUID(); configDb.transaction( ctx -> ctx.insertInto(WORKSPACE, WORKSPACE.ID, WORKSPACE.NAME, WORKSPACE.TOMBSTONE).values(workspaceId, "test-0", false) .execute()); @@ -363,7 +370,7 @@ void shouldReturnNumConnectionsBasic() throws SQLException { @Test @DisplayName("should ignore deleted connections") void shouldIgnoreNonRunningConnections() throws SQLException { - var workspaceId = UUID.randomUUID(); + final var workspaceId = UUID.randomUUID(); configDb.transaction( ctx -> ctx.insertInto(WORKSPACE, WORKSPACE.ID, WORKSPACE.NAME, WORKSPACE.TOMBSTONE).values(workspaceId, "test-0", false) .execute()); @@ -396,7 +403,7 @@ void shouldIgnoreNonRunningConnections() throws SQLException { @Test @DisplayName("should ignore deleted connections") void shouldIgnoreDeletedWorkspaces() throws SQLException { - var workspaceId = UUID.randomUUID(); + final var workspaceId = UUID.randomUUID(); configDb.transaction( ctx -> ctx.insertInto(WORKSPACE, WORKSPACE.ID, WORKSPACE.NAME, WORKSPACE.TOMBSTONE).values(workspaceId, "test-0", true) .execute()); diff --git a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterApp.java b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterApp.java index be2a17ae645a2..4e5f13c335f4a 100644 --- 
a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterApp.java +++ b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterApp.java @@ -4,16 +4,23 @@ package io.airbyte.metrics.reporter; +import io.airbyte.commons.lang.CloseableShutdownHook; import io.airbyte.config.Configs; import io.airbyte.config.EnvConfigs; import io.airbyte.db.Database; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; import io.airbyte.metrics.lib.DatadogClientConfiguration; import io.airbyte.metrics.lib.DogStatsDMetricSingleton; import io.airbyte.metrics.lib.MetricEmittingApps; import java.io.IOException; import java.util.concurrent.Executors; +import javax.sql.DataSource; import lombok.extern.slf4j.Slf4j; +import org.jooq.DSLContext; +import org.jooq.SQLDialect; @Slf4j public class ReporterApp { @@ -25,18 +32,27 @@ public static void main(final String[] args) throws IOException { DogStatsDMetricSingleton.initialize(MetricEmittingApps.METRICS_REPORTER, new DatadogClientConfiguration(configs)); - configDatabase = new ConfigsDatabaseInstance( + final DataSource dataSource = DataSourceFactory.create( configs.getConfigDatabaseUser(), configs.getConfigDatabasePassword(), - configs.getConfigDatabaseUrl()) - .getInitialized(); + DatabaseDriver.POSTGRESQL.getDriverClassName(), + configs.getConfigDatabaseUrl()); - final var toEmits = ToEmit.values(); - final var pollers = Executors.newScheduledThreadPool(toEmits.length); + try (final DSLContext dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES)) { - log.info("Scheduling {} metrics for emission..", toEmits.length); - for (ToEmit toEmit : toEmits) { - pollers.scheduleAtFixedRate(toEmit.emitRunnable, 0, toEmit.period, toEmit.timeUnit); + // Ensure that the database resources are closed on application shutdown + CloseableShutdownHook.registerRuntimeShutdownHook(dataSource, dslContext); + + configDatabase = new ConfigsDatabaseInstance(dslContext) + .getInitialized(); + + final var toEmits = ToEmit.values(); + final var pollers = Executors.newScheduledThreadPool(toEmits.length); + + log.info("Scheduling {} metrics for emission..", toEmits.length); + for (final ToEmit toEmit : toEmits) { + pollers.scheduleAtFixedRate(toEmit.emitRunnable, 0, toEmit.period, toEmit.timeUnit); + } } } diff --git a/airbyte-scheduler/app/build.gradle b/airbyte-scheduler/app/build.gradle index 656dab8ecc6aa..b5f1104686997 100644 --- a/airbyte-scheduler/app/build.gradle +++ b/airbyte-scheduler/app/build.gradle @@ -18,7 +18,7 @@ dependencies { implementation project(':airbyte-scheduler:persistence') implementation project(':airbyte-workers') - testImplementation "org.testcontainers:postgresql:1.15.3" + testImplementation libs.testcontainers.postgresql } application { diff --git a/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/SchedulerApp.java b/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/SchedulerApp.java index 2f665b4e30c8a..81d55e9e19ddf 100644 --- a/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/SchedulerApp.java +++ b/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/SchedulerApp.java @@ -15,6 +15,7 @@ import io.airbyte.commons.concurrency.GracefulShutdownHandler; import io.airbyte.commons.features.EnvVariableFeatureFlags; import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.commons.lang.CloseableShutdownHook; 
import io.airbyte.commons.version.AirbyteVersion; import io.airbyte.config.Configs; import io.airbyte.config.Configs.WorkerEnvironment; @@ -26,6 +27,8 @@ import io.airbyte.config.persistence.DatabaseConfigPersistence; import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; import io.airbyte.db.Database; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; import io.airbyte.db.instance.jobs.JobsDatabaseInstance; import io.airbyte.metrics.lib.DatadogClientConfiguration; @@ -52,6 +55,9 @@ import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +import javax.sql.DataSource; +import org.jooq.DSLContext; +import org.jooq.SQLDialect; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.MDC; @@ -76,6 +82,7 @@ public class SchedulerApp { private static final Duration SCHEDULING_DELAY = Duration.ofSeconds(5); private static final Duration CLEANING_DELAY = Duration.ofHours(2); private static final ThreadFactory THREAD_FACTORY = new ThreadFactoryBuilder().setNameFormat("worker-%d").build(); + private static final String DRIVER_CLASS_NAME = "org.postgresql.Driver"; private final Path workspaceRoot; private final JobPersistence jobPersistence; @@ -234,65 +241,71 @@ public static void main(final String[] args) throws IOException, InterruptedExce final String temporalHost = configs.getTemporalHost(); LOGGER.info("temporalHost = " + temporalHost); - // Wait for the server to initialize the database and run migration - // This should be converted into check for the migration version. Everything else as per. - waitForServer(configs); - LOGGER.info("Creating Job DB connection pool..."); - final Database jobDatabase = new JobsDatabaseInstance( - configs.getDatabaseUser(), - configs.getDatabasePassword(), - configs.getDatabaseUrl()) - .getInitialized(); - - final Database configDatabase = new ConfigsDatabaseInstance( - configs.getConfigDatabaseUser(), - configs.getConfigDatabasePassword(), - configs.getConfigDatabaseUrl()) - .getInitialized(); - final FeatureFlags featureFlags = new EnvVariableFeatureFlags(); - final JsonSecretsProcessor jsonSecretsProcessor = JsonSecretsProcessor.builder() - .maskSecrets(!featureFlags.exposeSecretsInExport()) - .copySecrets(true) - .build(); - final ConfigPersistence configPersistence = DatabaseConfigPersistence.createWithValidation(configDatabase, jsonSecretsProcessor); - final ConfigRepository configRepository = new ConfigRepository(configPersistence, configDatabase); - - final JobPersistence jobPersistence = new DefaultJobPersistence(jobDatabase); - final JobCleaner jobCleaner = new JobCleaner( - configs.getWorkspaceRetentionConfig(), - workspaceRoot, - jobPersistence); - AirbyteVersion.assertIsCompatible( - configs.getAirbyteVersion(), - jobPersistence.getVersion().map(AirbyteVersion::new).orElseThrow()); - - TrackingClientSingleton.initialize( - configs.getTrackingStrategy(), - new Deployment(configs.getDeploymentMode(), jobPersistence.getDeployment().orElseThrow(), configs.getWorkerEnvironment()), - configs.getAirbyteRole(), - configs.getAirbyteVersion(), - configRepository); - final JobNotifier jobNotifier = new JobNotifier( - configs.getWebappUrl(), - configRepository, - new WorkspaceHelper(configRepository, jobPersistence), - TrackingClientSingleton.get()); - final TemporalClient temporalClient = TemporalClient.production(temporalHost, workspaceRoot, 
configs);
-
-    DogStatsDMetricSingleton.initialize(MetricEmittingApps.SCHEDULER, new DatadogClientConfiguration(configs));
-
-    LOGGER.info("Launching scheduler...");
-    new SchedulerApp(
-        workspaceRoot,
-        jobPersistence,
-        configRepository,
-        jobCleaner,
-        jobNotifier,
-        temporalClient,
-        Integer.parseInt(configs.getSubmitterNumThreads()),
-        configs.getSyncJobMaxAttempts(),
-        configs.getAirbyteVersionOrWarning(), configs.getWorkerEnvironment(), configs.getLogConfigs())
-            .start();
+    final DataSource configsDataSource = DataSourceFactory.create(configs.getConfigDatabaseUser(), configs.getConfigDatabasePassword(),
+        DRIVER_CLASS_NAME, configs.getConfigDatabaseUrl());
+
+    final DataSource jobsDataSource = DataSourceFactory.create(configs.getDatabaseUser(), configs.getDatabasePassword(),
+        DRIVER_CLASS_NAME, configs.getDatabaseUrl());
+
+    // Manual configuration that will be replaced by Dependency Injection in the future
+    try (final DSLContext configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES);
+        final DSLContext jobsDslContext = DSLContextFactory.create(jobsDataSource, SQLDialect.POSTGRES)) {
+
+      // Ensure that the database resources are closed on application shutdown
+      CloseableShutdownHook.registerRuntimeShutdownHook(configsDataSource, jobsDataSource, configsDslContext, jobsDslContext);
+
+      // Wait for the server to initialize the database and run migration
+      // This should be converted into a check for the migration version; everything else stays as is.
+      waitForServer(configs);
+      LOGGER.info("Creating Job DB connection pool...");
+      final Database jobDatabase = new JobsDatabaseInstance(jobsDslContext).getInitialized();
+
+      final Database configDatabase = new ConfigsDatabaseInstance(configsDslContext).getInitialized();
+      final FeatureFlags featureFlags = new EnvVariableFeatureFlags();
+      final JsonSecretsProcessor jsonSecretsProcessor = JsonSecretsProcessor.builder()
+          .maskSecrets(!featureFlags.exposeSecretsInExport())
+          .copySecrets(true)
+          .build();
+      final ConfigPersistence configPersistence = DatabaseConfigPersistence.createWithValidation(configDatabase, jsonSecretsProcessor);
+      final ConfigRepository configRepository = new ConfigRepository(configPersistence, configDatabase);
+
+      final JobPersistence jobPersistence = new DefaultJobPersistence(jobDatabase);
+      final JobCleaner jobCleaner = new JobCleaner(
+          configs.getWorkspaceRetentionConfig(),
+          workspaceRoot,
+          jobPersistence);
+      AirbyteVersion.assertIsCompatible(
+          configs.getAirbyteVersion(),
+          jobPersistence.getVersion().map(AirbyteVersion::new).orElseThrow());
+
+      TrackingClientSingleton.initialize(
+          configs.getTrackingStrategy(),
+          new Deployment(configs.getDeploymentMode(), jobPersistence.getDeployment().orElseThrow(), configs.getWorkerEnvironment()),
+          configs.getAirbyteRole(),
+          configs.getAirbyteVersion(),
+          configRepository);
+      final JobNotifier jobNotifier = new JobNotifier(
+          configs.getWebappUrl(),
+          configRepository,
+          new WorkspaceHelper(configRepository, jobPersistence),
+          TrackingClientSingleton.get());
+      final TemporalClient temporalClient = TemporalClient.production(temporalHost, workspaceRoot, configs);
+
+      DogStatsDMetricSingleton.initialize(MetricEmittingApps.SCHEDULER, new DatadogClientConfiguration(configs));
+
+      LOGGER.info("Launching scheduler...");
+      new SchedulerApp(
+          workspaceRoot,
+          jobPersistence,
+          configRepository,
+          jobCleaner,
+          jobNotifier,
+          temporalClient,
+          Integer.parseInt(configs.getSubmitterNumThreads()),
+          configs.getSyncJobMaxAttempts(),
+
configs.getAirbyteVersionOrWarning(), configs.getWorkerEnvironment(), configs.getLogConfigs()) + .start(); + } } } diff --git a/airbyte-scheduler/persistence/build.gradle b/airbyte-scheduler/persistence/build.gradle index 73ae03582082f..58cff03087660 100644 --- a/airbyte-scheduler/persistence/build.gradle +++ b/airbyte-scheduler/persistence/build.gradle @@ -15,6 +15,7 @@ dependencies { implementation project(':airbyte-protocol:models') implementation project(':airbyte-scheduler:models') - testImplementation "org.flywaydb:flyway-core:7.14.0" - testImplementation "org.testcontainers:postgresql:1.15.3" + testImplementation libs.flyway.core + testImplementation libs.testcontainers.postgresql + testImplementation project(':airbyte-test-utils') } diff --git a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java index 81f22283719f9..1d283d767f435 100644 --- a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java +++ b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java @@ -31,6 +31,7 @@ import io.airbyte.config.JobOutput; import io.airbyte.config.JobSyncConfig; import io.airbyte.db.Database; +import io.airbyte.db.factory.DSLContextFactory; import io.airbyte.db.instance.jobs.JobsDatabaseSchema; import io.airbyte.db.instance.test.TestDatabaseProviders; import io.airbyte.scheduler.models.Attempt; @@ -39,8 +40,10 @@ import io.airbyte.scheduler.models.Job; import io.airbyte.scheduler.models.JobStatus; import io.airbyte.scheduler.models.JobWithStatusAndTimestamp; +import io.airbyte.test.utils.DatabaseConnectionHelper; import io.airbyte.validation.json.JsonSchemaValidator; import io.airbyte.validation.json.JsonValidationException; +import java.io.Closeable; import java.io.IOException; import java.nio.file.Path; import java.sql.SQLException; @@ -59,8 +62,11 @@ import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.sql.DataSource; +import org.jooq.DSLContext; import org.jooq.Record; import org.jooq.Result; +import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -98,9 +104,10 @@ class DefaultJobPersistenceTest { private static PostgreSQLContainer container; private Database jobDatabase; - private Database configDatabase; private Supplier timeSupplier; private JobPersistence jobPersistence; + private DataSource dataSource; + private DSLContext dslContext; @BeforeAll public static void dbSetup() { @@ -168,7 +175,9 @@ private static Job createJob( @SuppressWarnings("unchecked") @BeforeEach public void setup() throws Exception { - final TestDatabaseProviders databaseProviders = new TestDatabaseProviders(container); + dataSource = DatabaseConnectionHelper.createDataSource(container); + dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); + final TestDatabaseProviders databaseProviders = new TestDatabaseProviders(dataSource, dslContext); jobDatabase = databaseProviders.createNewJobsDatabase(); resetDb(); @@ -180,8 +189,11 @@ public void setup() throws Exception { } @AfterEach - void tearDown() throws Exception { - jobDatabase.close(); + void tearDown() throws IOException { + dslContext.close(); + if (dataSource instanceof Closeable closeable) { + closeable.close(); + } 
} private void resetDb() throws SQLException { diff --git a/airbyte-server/build.gradle b/airbyte-server/build.gradle index e4e8d26966ba4..c108f322221c1 100644 --- a/airbyte-server/build.gradle +++ b/airbyte-server/build.gradle @@ -63,7 +63,7 @@ dependencies { implementation 'org.glassfish.jersey.inject:jersey-hk2' implementation 'org.glassfish.jersey.media:jersey-media-json-jackson' implementation 'org.glassfish.jersey.ext:jersey-bean-validation' - implementation "org.flywaydb:flyway-core:7.14.0" + implementation libs.flyway.core implementation 'com.github.slugify:slugify:2.4' @@ -85,11 +85,10 @@ dependencies { implementation project(':airbyte-scheduler:persistence') implementation project(':airbyte-workers') - testImplementation "org.postgresql:postgresql:42.2.18" - + testImplementation libs.postgresql testImplementation 'com.squareup.okhttp3:mockwebserver:4.9.1' - - testImplementation "org.testcontainers:postgresql:1.15.3" + testImplementation libs.testcontainers.postgresql + testImplementation project(':airbyte-test-utils') } // we want to be able to access the generated db files from config/init when we build the server docker image. diff --git a/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiFactory.java b/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiFactory.java index 53ab8a3ea48d8..a2290a9d378cb 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiFactory.java +++ b/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiFactory.java @@ -25,6 +25,7 @@ import java.net.http.HttpClient; import java.nio.file.Path; import java.util.Map; +import org.flywaydb.core.Flyway; import org.glassfish.hk2.api.Factory; import org.slf4j.MDC; @@ -52,6 +53,8 @@ public class ConfigurationApiFactory implements Factory { private static HttpClient httpClient; private static FeatureFlags featureFlags; private static EventRunner eventRunner; + private static Flyway configsFlyway; + private static Flyway jobsFlyway; public static void setValues( final WorkflowServiceStubs temporalService, @@ -75,7 +78,9 @@ public static void setValues( final Path workspaceRoot, final HttpClient httpClient, final FeatureFlags featureFlags, - final EventRunner eventRunner) { + final EventRunner eventRunner, + final Flyway configsFlyway, + final Flyway jobsFlyway) { ConfigurationApiFactory.configRepository = configRepository; ConfigurationApiFactory.jobPersistence = jobPersistence; ConfigurationApiFactory.seed = seed; @@ -98,6 +103,8 @@ public static void setValues( ConfigurationApiFactory.httpClient = httpClient; ConfigurationApiFactory.featureFlags = featureFlags; ConfigurationApiFactory.eventRunner = eventRunner; + ConfigurationApiFactory.configsFlyway = configsFlyway; + ConfigurationApiFactory.jobsFlyway = jobsFlyway; } @Override @@ -125,7 +132,9 @@ public ConfigurationApi provide() { ConfigurationApiFactory.workspaceRoot, ConfigurationApiFactory.httpClient, ConfigurationApiFactory.featureFlags, - ConfigurationApiFactory.eventRunner); + ConfigurationApiFactory.eventRunner, + ConfigurationApiFactory.configsFlyway, + ConfigurationApiFactory.jobsFlyway); } @Override diff --git a/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java b/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java index 5095586381aa9..bfc1911cb5e0c 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java +++ b/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java @@ -9,6 +9,7 @@ import io.airbyte.analytics.TrackingClientSingleton; import 
io.airbyte.commons.features.EnvVariableFeatureFlags; import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.commons.lang.CloseableShutdownHook; import io.airbyte.commons.resources.MoreResources; import io.airbyte.commons.version.AirbyteVersion; import io.airbyte.config.Configs; @@ -26,6 +27,9 @@ import io.airbyte.config.persistence.split_secrets.SecretPersistence; import io.airbyte.config.persistence.split_secrets.SecretsHydrator; import io.airbyte.db.Database; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.FlywayFactory; import io.airbyte.db.instance.DatabaseInstance; import io.airbyte.db.instance.MinimumFlywayMigrationVersionCheck; import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; @@ -48,6 +52,7 @@ import io.airbyte.server.errors.KnownExceptionMapper; import io.airbyte.server.errors.NotFoundExceptionMapper; import io.airbyte.server.errors.UncaughtExceptionMapper; +import io.airbyte.server.handlers.DbMigrationHandler; import io.airbyte.validation.json.JsonValidationException; import io.airbyte.workers.WorkerConfigs; import io.airbyte.workers.temporal.TemporalClient; @@ -60,13 +65,17 @@ import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; +import javax.sql.DataSource; import lombok.val; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; +import org.flywaydb.core.Flyway; import org.glassfish.jersey.jackson.internal.jackson.jaxrs.json.JacksonJaxbJsonProvider; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.servlet.ServletContainer; +import org.jooq.DSLContext; +import org.jooq.SQLDialect; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.MDC; @@ -75,6 +84,7 @@ public class ServerApp implements ServerRunnable { private static final Logger LOGGER = LoggerFactory.getLogger(ServerApp.class); private static final int PORT = 8001; + private static final String DRIVER_CLASS_NAME = "org.postgresql.Driver"; private final AirbyteVersion airbyteVersion; private final Set> customComponentClasses; @@ -127,24 +137,36 @@ public void start() throws Exception { private static void assertDatabasesReady(final Configs configs, final DatabaseInstance configsDatabaseInstance, - final DatabaseInstance jobsDatabaseInstance) + final DataSource configsDataSource, + final DatabaseInstance jobsDatabaseInstance, + final DataSource jobsDataSource) throws InterruptedException { LOGGER.info("Checking configs database flyway migration version.."); MinimumFlywayMigrationVersionCheck.assertDatabase(configsDatabaseInstance, MinimumFlywayMigrationVersionCheck.DEFAULT_ASSERT_DATABASE_TIMEOUT_MS); - val configsMigrator = new ConfigsDatabaseMigrator(configsDatabaseInstance.getInitialized(), ServerApp.class.getName()); + final Flyway configsFlyway = FlywayFactory.create(configsDataSource, ServerApp.class.getName(), ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); + val configsMigrator = new ConfigsDatabaseMigrator(configsDatabaseInstance.getInitialized(), configsFlyway); MinimumFlywayMigrationVersionCheck.assertMigrations(configsMigrator, configs.getConfigsDatabaseMinimumFlywayMigrationVersion(), configs.getConfigsDatabaseInitializationTimeoutMs()); LOGGER.info("Checking jobs database flyway migration version.."); MinimumFlywayMigrationVersionCheck.assertDatabase(jobsDatabaseInstance, 
MinimumFlywayMigrationVersionCheck.DEFAULT_ASSERT_DATABASE_TIMEOUT_MS); - val jobsMigrator = new JobsDatabaseMigrator(jobsDatabaseInstance.getInitialized(), ServerApp.class.getName()); + final Flyway jobsFlyway = FlywayFactory.create(jobsDataSource, ServerApp.class.getName(), JobsDatabaseMigrator.DB_IDENTIFIER, + JobsDatabaseMigrator.MIGRATION_FILE_LOCATION); + val jobsMigrator = new JobsDatabaseMigrator(jobsDatabaseInstance.getInitialized(), jobsFlyway); MinimumFlywayMigrationVersionCheck.assertMigrations(jobsMigrator, configs.getJobsDatabaseMinimumFlywayMigrationVersion(), configs.getJobsDatabaseInitializationTimeoutMs()); } - public static ServerRunnable getServer(final ServerFactory apiFactory, final ConfigPersistence seed) throws Exception { - final Configs configs = new EnvConfigs(); + public static ServerRunnable getServer(final ServerFactory apiFactory, + final ConfigPersistence seed, + final Configs configs, + final DSLContext configsDslContext, + final DataSource configsDataSource, + final DSLContext jobsDslContext, + final DataSource jobsDataSource) + throws Exception { final WorkerConfigs workerConfigs = new WorkerConfigs(configs); LogClientSingleton.getInstance().setWorkspaceMdc( @@ -154,10 +176,10 @@ public static ServerRunnable getServer(final ServerFactory apiFactory, final Con LOGGER.info("Checking databases.."); final DatabaseInstance configsDatabaseInstance = - new ConfigsDatabaseInstance(configs.getConfigDatabaseUser(), configs.getConfigDatabasePassword(), configs.getConfigDatabaseUrl()); + new ConfigsDatabaseInstance(configsDslContext); final DatabaseInstance jobsDatabaseInstance = - new JobsDatabaseInstance(configs.getDatabaseUser(), configs.getDatabasePassword(), configs.getDatabaseUrl()); - assertDatabasesReady(configs, configsDatabaseInstance, jobsDatabaseInstance); + new JobsDatabaseInstance(jobsDslContext); + assertDatabasesReady(configs, configsDatabaseInstance, configsDataSource, jobsDatabaseInstance, jobsDataSource); LOGGER.info("Creating Staged Resource folder..."); ConfigDumpImporter.initStagedResourceFolder(); @@ -170,9 +192,9 @@ public static ServerRunnable getServer(final ServerFactory apiFactory, final Con .copySecrets(false) .build(); final ConfigPersistence configPersistence = DatabaseConfigPersistence.createWithValidation(configDatabase, jsonSecretsProcessor); - final SecretsHydrator secretsHydrator = SecretPersistence.getSecretsHydrator(configs); - final Optional secretPersistence = SecretPersistence.getLongLived(configs); - final Optional ephemeralSecretPersistence = SecretPersistence.getEphemeral(configs); + final SecretsHydrator secretsHydrator = SecretPersistence.getSecretsHydrator(configsDslContext, configs); + final Optional secretPersistence = SecretPersistence.getLongLived(configsDslContext, configs); + final Optional ephemeralSecretPersistence = SecretPersistence.getEphemeral(configsDslContext, configs); final ConfigRepository configRepository = new ConfigRepository(configPersistence, configDatabase); final SecretsRepositoryReader secretsRepositoryReader = new SecretsRepositoryReader(configRepository, secretsHydrator); final SecretsRepositoryWriter secretsRepositoryWriter = @@ -206,6 +228,11 @@ public static ServerRunnable getServer(final ServerFactory apiFactory, final Con final EventRunner eventRunner = new TemporalEventRunner( TemporalClient.production(configs.getTemporalHost(), configs.getWorkspaceRoot(), configs)); + final Flyway configsFlyway = FlywayFactory.create(configsDataSource, DbMigrationHandler.class.getSimpleName(), + 
ConfigsDatabaseMigrator.DB_IDENTIFIER, ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); + final Flyway jobsFlyway = FlywayFactory.create(jobsDataSource, DbMigrationHandler.class.getSimpleName(), JobsDatabaseMigrator.DB_IDENTIFIER, + JobsDatabaseMigrator.MIGRATION_FILE_LOCATION); + LOGGER.info("Starting server..."); return apiFactory.create( @@ -228,7 +255,9 @@ public static ServerRunnable getServer(final ServerFactory apiFactory, final Con configs.getWorkspaceRoot(), httpClient, featureFlags, - eventRunner); + eventRunner, + configsFlyway, + jobsFlyway); } private static void migrateExistingConnection(final ConfigRepository configRepository, final EventRunner eventRunner) @@ -244,7 +273,24 @@ private static void migrateExistingConnection(final ConfigRepository configRepos public static void main(final String[] args) throws Exception { try { - getServer(new ServerFactory.Api(), YamlSeedConfigPersistence.getDefault()).start(); + final Configs configs = new EnvConfigs(); + + // Manual configuration that will be replaced by Dependency Injection in the future + final DataSource configsDataSource = + DataSourceFactory.create(configs.getConfigDatabaseUser(), configs.getConfigDatabasePassword(), DRIVER_CLASS_NAME, + configs.getConfigDatabaseUrl()); + final DataSource jobsDataSource = + DataSourceFactory.create(configs.getDatabaseUser(), configs.getDatabasePassword(), DRIVER_CLASS_NAME, configs.getDatabaseUrl()); + + try (final DSLContext configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); + final DSLContext jobsDslContext = DSLContextFactory.create(jobsDataSource, SQLDialect.POSTGRES)) { + + // Ensure that the database resources are closed on application shutdown + CloseableShutdownHook.registerRuntimeShutdownHook(configsDataSource, jobsDataSource, configsDslContext, jobsDslContext); + + getServer(new ServerFactory.Api(), YamlSeedConfigPersistence.getDefault(), + configs, configsDslContext, configsDataSource, jobsDslContext, jobsDataSource).start(); + } } catch (final Throwable e) { LOGGER.error("Server failed", e); System.exit(1); // so the app doesn't hang on background threads diff --git a/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java b/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java index 881e17f686c97..8433e1c8395f7 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java +++ b/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java @@ -26,6 +26,7 @@ import java.nio.file.Path; import java.util.Set; import java.util.concurrent.TimeUnit; +import org.flywaydb.core.Flyway; import org.slf4j.MDC; public interface ServerFactory { @@ -49,7 +50,9 @@ ServerRunnable create(SchedulerJobClient schedulerJobClient, Path workspaceRoot, HttpClient httpClient, FeatureFlags featureFlags, - EventRunner eventRunner); + EventRunner eventRunner, + Flyway configsFlyway, + Flyway jobsFlyway); class Api implements ServerFactory { @@ -73,7 +76,9 @@ public ServerRunnable create(final SchedulerJobClient schedulerJobClient, final Path workspaceRoot, final HttpClient httpClient, final FeatureFlags featureFlags, - final EventRunner eventRunner) { + final EventRunner eventRunner, + final Flyway configsFlyway, + final Flyway jobsFlyway) { // set static values for factory ConfigurationApiFactory.setValues( temporalService, @@ -97,7 +102,9 @@ public ServerRunnable create(final SchedulerJobClient schedulerJobClient, workspaceRoot, httpClient, featureFlags, - eventRunner); + eventRunner, + configsFlyway, + jobsFlyway); // 
server configurations
    final Set<Class<?>> componentClasses = Set.of(ConfigurationApi.class);
diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java
index 464fa12e1b47f..1c2667d2c8b1d 100644
--- a/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java
+++ b/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java
@@ -141,6 +141,7 @@
 import java.net.http.HttpClient;
 import java.nio.file.Path;
 import java.util.Map;
+import org.flywaydb.core.Flyway;
 @javax.ws.rs.Path("/v1")
 public class ConfigurationApi implements io.airbyte.api.V1Api {
@@ -185,7 +186,9 @@ public ConfigurationApi(final ConfigRepository configRepository,
                           final Path workspaceRoot,
                           final HttpClient httpClient,
                           final FeatureFlags featureFlags,
-                          final EventRunner eventRunner) {
+                          final EventRunner eventRunner,
+                          final Flyway configsFlyway,
+                          final Flyway jobsFlyway) {
     this.workerEnvironment = workerEnvironment;
     this.logConfigs = logConfigs;
     this.workspaceRoot = workspaceRoot;
@@ -259,7 +262,7 @@ public ConfigurationApi(final ConfigRepository configRepository,
         true);
     logsHandler = new LogsHandler();
     openApiConfigHandler = new OpenApiConfigHandler();
-    dbMigrationHandler = new DbMigrationHandler(configsDatabase, jobsDatabase);
+    dbMigrationHandler = new DbMigrationHandler(configsDatabase, configsFlyway, jobsDatabase, jobsFlyway);
   }
   // WORKSPACE
@@ -821,7 +824,7 @@ public WebBackendConnectionRead webBackendGetConnection(final WebBackendConnecti
   }
   @Override
-  public WebBackendWorkspaceStateResult webBackendGetWorkspaceState(WebBackendWorkspaceState webBackendWorkspaceState) {
+  public WebBackendWorkspaceStateResult webBackendGetWorkspaceState(final WebBackendWorkspaceState webBackendWorkspaceState) {
     return execute(() -> webBackendConnectionsHandler.getWorkspaceState(webBackendWorkspaceState));
   }
diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/DbMigrationHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/DbMigrationHandler.java
index 2f0f187a49caf..b669ea27fea3f 100644
--- a/airbyte-server/src/main/java/io/airbyte/server/handlers/DbMigrationHandler.java
+++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/DbMigrationHandler.java
@@ -14,6 +14,7 @@
 import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator;
 import io.airbyte.db.instance.jobs.JobsDatabaseMigrator;
 import java.util.stream.Collectors;
+import org.flywaydb.core.Flyway;
 import org.flywaydb.core.api.MigrationInfo;
 import org.flywaydb.core.api.output.MigrateOutput;
 import org.flywaydb.core.api.output.MigrateResult;
@@ -23,9 +24,9 @@ public class DbMigrationHandler {
   private final DatabaseMigrator configDbMigrator;
   private final DatabaseMigrator jobDbMigrator;
-  public DbMigrationHandler(final Database configsDatabase, final Database jobsDatabase) {
-    this.configDbMigrator = new ConfigsDatabaseMigrator(configsDatabase, DbMigrationHandler.class.getSimpleName());
-    this.jobDbMigrator = new JobsDatabaseMigrator(jobsDatabase, DbMigrationHandler.class.getSimpleName());
+  public DbMigrationHandler(final Database configsDatabase, final Flyway configsFlyway, final Database jobsDatabase, final Flyway jobsFlyway) {
+    this.configDbMigrator = new ConfigsDatabaseMigrator(configsDatabase, configsFlyway);
+    this.jobDbMigrator = new JobsDatabaseMigrator(jobsDatabase, jobsFlyway);
   }
   public DbMigrationReadList list(final DbMigrationRequestBody request) {
diff --git
a/airbyte-server/src/test/java/io/airbyte/server/apis/ConfigurationApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/ConfigurationApiTest.java index 051db77017984..0688b462d75e3 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/ConfigurationApiTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/apis/ConfigurationApiTest.java @@ -28,6 +28,7 @@ import io.temporal.serviceclient.WorkflowServiceStubs; import java.net.http.HttpClient; import java.nio.file.Path; +import org.flywaydb.core.Flyway; import org.junit.jupiter.api.Test; public class ConfigurationApiTest { @@ -59,7 +60,9 @@ void testImportDefinitions() { Path.of(""), mock(HttpClient.class), mock(FeatureFlags.class), - mock(EventRunner.class)); + mock(EventRunner.class), + mock(Flyway.class), + mock(Flyway.class)); assertTrue(configurationApi.canImportDefinitons()); } diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/ArchiveHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/ArchiveHandlerTest.java index 303808aed450e..fae5cc83d61c6 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/ArchiveHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/ArchiveHandlerTest.java @@ -38,12 +38,15 @@ import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; import io.airbyte.config.persistence.split_secrets.NoOpSecretsHydrator; import io.airbyte.db.Database; +import io.airbyte.db.factory.DSLContextFactory; import io.airbyte.db.instance.test.TestDatabaseProviders; import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.scheduler.persistence.DefaultJobPersistence; import io.airbyte.scheduler.persistence.JobPersistence; import io.airbyte.scheduler.persistence.WorkspaceHelper; +import io.airbyte.test.utils.DatabaseConnectionHelper; import io.airbyte.validation.json.JsonValidationException; +import java.io.Closeable; import java.io.File; import java.io.IOException; import java.nio.file.Files; @@ -56,6 +59,9 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.sql.DataSource; +import org.jooq.DSLContext; +import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -74,6 +80,8 @@ public class ArchiveHandlerTest { private static final AirbyteVersion VERSION = new AirbyteVersion("0.6.8"); private static PostgreSQLContainer container; + private DataSource dataSource; + private DSLContext dslContext; private Database jobDatabase; private Database configDatabase; private JobPersistence jobPersistence; @@ -111,7 +119,9 @@ public static void dbDown() { @BeforeEach public void setup() throws Exception { - final TestDatabaseProviders databaseProviders = new TestDatabaseProviders(container); + dataSource = DatabaseConnectionHelper.createDataSource(container); + dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); + final TestDatabaseProviders databaseProviders = new TestDatabaseProviders(dataSource, dslContext); jobDatabase = databaseProviders.createNewJobsDatabase(); configDatabase = databaseProviders.createNewConfigsDatabase(); jobPersistence = new DefaultJobPersistence(jobDatabase); @@ -142,9 +152,11 @@ public void setup() throws Exception { } @AfterEach - void tearDown() throws Exception { - jobDatabase.close(); - configDatabase.close(); + void tearDown() throws IOException { + dslContext.close(); + if (dataSource instanceof 
Closeable closeable) { + closeable.close(); + } } /** diff --git a/airbyte-test-utils/build.gradle b/airbyte-test-utils/build.gradle index ae5668100261b..74ebc96d5dda4 100644 --- a/airbyte-test-utils/build.gradle +++ b/airbyte-test-utils/build.gradle @@ -5,8 +5,9 @@ plugins { dependencies { implementation project(':airbyte-db:lib') - implementation 'org.testcontainers:postgresql:1.15.3' - implementation "org.testcontainers:cockroachdb:1.15.3" + implementation libs.testcontainers.jdbc + implementation libs.testcontainers.postgresql + implementation libs.testcontainers.cockroachdb implementation 'org.junit.jupiter:junit-jupiter-api:5.7.2' } diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/CockroachDBContainerHelper.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/CockroachDBContainerHelper.java index 378453f8d86d9..72c6f10e72be4 100644 --- a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/CockroachDBContainerHelper.java +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/CockroachDBContainerHelper.java @@ -10,11 +10,15 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.db.jdbc.JdbcUtils; import java.io.IOException; import java.util.UUID; -import org.jooq.SQLDialect; +import javax.sql.DataSource; +import org.jooq.DSLContext; import org.testcontainers.containers.CockroachContainer; import org.testcontainers.utility.MountableFile; @@ -61,27 +65,23 @@ public static JsonNode getDestinationConfig(final CockroachContainer psqlDb, fin .build()); } - public static Database getDatabaseFromConfig(final JsonNode config) { - return Databases.createDatabase( + public static DataSource getDataSourceFromConfig(final JsonNode config) { + return DataSourceFactory.create( config.get("username").asText(), config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", + DatabaseDriver.POSTGRESQL.getDriverClassName(), + String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver", - SQLDialect.POSTGRES); + config.get("port").asInt(), + config.get("database").asText())); } - public static JdbcDatabase getJdbcDatabaseFromConfig(final JsonNode config) { - return Databases.createJdbcDatabase( - config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:postgresql://%s:%s/%s", - config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver"); + public static Database getDatabaseFromConfig(final DSLContext dslContext) {// final JsonNode config) { + return new Database(dslContext); + } + + public static JdbcDatabase getJdbcDatabaseFromConfig(final DataSource dataSource) { // final JsonNode config) { + return new DefaultJdbcDatabase(dataSource, JdbcUtils.getDefaultSourceOperations()); } } diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/DatabaseConnectionHelper.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/DatabaseConnectionHelper.java new file mode 100644 index 0000000000000..85345f8996305 --- /dev/null +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/DatabaseConnectionHelper.java @@ -0,0 +1,48 @@ +/* + * 
Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.test.utils;
+
+import io.airbyte.db.factory.DSLContextFactory;
+import io.airbyte.db.factory.DataSourceFactory;
+import javax.sql.DataSource;
+import org.jooq.DSLContext;
+import org.jooq.SQLDialect;
+import org.testcontainers.containers.JdbcDatabaseContainer;
+
+/**
+ * Helper class that facilitates the creation of database connection objects for testing purposes.
+ */
+public class DatabaseConnectionHelper {
+
+  /**
+   * Constructs a new {@link DataSource} using the provided configuration.
+   *
+   * @param container A JDBC Test Container instance.
+   * @return The configured {@link DataSource}.
+   */
+  public static DataSource createDataSource(final JdbcDatabaseContainer<?> container) {
+    return DataSourceFactory.create(container.getUsername(),
+        container.getPassword(),
+        container.getDriverClassName(),
+        container.getJdbcUrl());
+  }
+
+  /**
+   * Constructs a configured {@link DSLContext} instance using the provided configuration.
+   *
+   * @param container A JDBC Test Container instance.
+   * @param dialect The SQL dialect to use with objects created from this context.
+   * @return The configured {@link DSLContext}.
+   */
+  public static DSLContext createDslContext(final JdbcDatabaseContainer<?> container, final SQLDialect dialect) {
+    return DSLContextFactory.create(
+        container.getUsername(),
+        container.getPassword(),
+        container.getDriverClassName(),
+        container.getJdbcUrl(),
+        dialect);
+  }
+
+}
diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/PostgreSQLContainerHelper.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/PostgreSQLContainerHelper.java
index 6d0cfdbd13966..3115934f3776d 100644
--- a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/PostgreSQLContainerHelper.java
+++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/PostgreSQLContainerHelper.java
@@ -10,11 +10,15 @@
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.commons.string.Strings;
 import io.airbyte.db.Database;
-import io.airbyte.db.Databases;
+import io.airbyte.db.factory.DataSourceFactory;
+import io.airbyte.db.factory.DatabaseDriver;
+import io.airbyte.db.jdbc.DefaultJdbcDatabase;
 import io.airbyte.db.jdbc.JdbcDatabase;
+import io.airbyte.db.jdbc.JdbcUtils;
 import java.io.IOException;
 import java.util.UUID;
-import org.jooq.SQLDialect;
+import javax.sql.DataSource;
+import org.jooq.DSLContext;
 import org.testcontainers.containers.PostgreSQLContainer;
 import org.testcontainers.utility.MountableFile;
@@ -57,27 +61,23 @@ public static JsonNode getDestinationConfig(final PostgreSQLContainer<?> psqlDb,
         .build());
   }
 
-  public static Database getDatabaseFromConfig(final JsonNode config) {
-    return Databases.createDatabase(
+  public static DataSource getDataSourceFromConfig(final JsonNode config) {
+    return DataSourceFactory.create(
         config.get("username").asText(),
         config.get("password").asText(),
-        String.format("jdbc:postgresql://%s:%s/%s",
+        DatabaseDriver.POSTGRESQL.getDriverClassName(),
+        String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(),
             config.get("host").asText(),
-            config.get("port").asText(),
-            config.get("database").asText()),
-        "org.postgresql.Driver",
-        SQLDialect.POSTGRES);
+            config.get("port").asInt(),
+            config.get("database").asText()));
   }
 
-  public static JdbcDatabase getJdbcDatabaseFromConfig(final JsonNode config) {
-    return Databases.createJdbcDatabase(
-        config.get("username").asText(),
-        config.get("password").asText(),
-        String.format("jdbc:postgresql://%s:%s/%s",
-
config.get("host").asText(), - config.get("port").asText(), - config.get("database").asText()), - "org.postgresql.Driver"); + public static Database getDatabaseFromConfig(final DSLContext dslContext) { + return new Database(dslContext); + } + + public static JdbcDatabase getJdbcDatabaseFromConfig(final DataSource dataSource) { + return new DefaultJdbcDatabase(dataSource, JdbcUtils.getDefaultSourceOperations()); } } diff --git a/airbyte-test-utils/src/test/java/io/airbyte/test/utils/DatabaseConnectionHelperTest.java b/airbyte-test-utils/src/test/java/io/airbyte/test/utils/DatabaseConnectionHelperTest.java new file mode 100644 index 0000000000000..8698da73f3084 --- /dev/null +++ b/airbyte-test-utils/src/test/java/io/airbyte/test/utils/DatabaseConnectionHelperTest.java @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.test.utils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import com.zaxxer.hikari.HikariDataSource; +import javax.sql.DataSource; +import org.jooq.DSLContext; +import org.jooq.SQLDialect; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.testcontainers.containers.PostgreSQLContainer; + +public class DatabaseConnectionHelperTest { + + private static final String DATABASE_NAME = "airbyte_test_database"; + + protected static PostgreSQLContainer container; + + @BeforeAll + public static void dbSetup() { + container = new PostgreSQLContainer<>("postgres:13-alpine") + .withDatabaseName(DATABASE_NAME) + .withUsername("docker") + .withPassword("docker"); + container.start(); + } + + @AfterAll + public static void dbDown() { + container.close(); + } + + @Test + void testCreatingFromATestContainer() { + final DataSource dataSource = DatabaseConnectionHelper.createDataSource(container); + assertNotNull(dataSource); + assertEquals(HikariDataSource.class, dataSource.getClass()); + assertEquals(5, ((HikariDataSource) dataSource).getHikariConfigMXBean().getMaximumPoolSize()); + } + + @Test + void testCreatingADslContextFromATestContainer() { + final SQLDialect dialect = SQLDialect.POSTGRES; + final DSLContext dslContext = DatabaseConnectionHelper.createDslContext(container, dialect); + assertNotNull(dslContext); + assertEquals(dialect, dslContext.configuration().dialect()); + } + +} diff --git a/airbyte-tests/build.gradle b/airbyte-tests/build.gradle index 7c65475095cfb..ccc287e68455a 100644 --- a/airbyte-tests/build.gradle +++ b/airbyte-tests/build.gradle @@ -39,7 +39,7 @@ dependencies { implementation project(':airbyte-container-orchestrator') implementation 'io.fabric8:kubernetes-client:5.12.2' - implementation 'org.testcontainers:testcontainers:1.15.3' + implementation libs.testcontainers acceptanceTestsImplementation project(':airbyte-api') acceptanceTestsImplementation project(':airbyte-commons') @@ -54,14 +54,14 @@ dependencies { acceptanceTestsImplementation 'io.github.cdimascio:java-dotenv:3.0.0' acceptanceTestsImplementation 'io.temporal:temporal-sdk:1.8.1' acceptanceTestsImplementation 'org.apache.commons:commons-csv:1.4' - acceptanceTestsImplementation 'org.testcontainers:postgresql:1.15.3' - acceptanceTestsImplementation 'org.postgresql:postgresql:42.2.18' + acceptanceTestsImplementation libs.testcontainers.postgresql + acceptanceTestsImplementation libs.postgresql automaticMigrationAcceptanceTestImplementation project(':airbyte-api') 
automaticMigrationAcceptanceTestImplementation project(':airbyte-commons') automaticMigrationAcceptanceTestImplementation project(':airbyte-tests') - automaticMigrationAcceptanceTestImplementation 'org.testcontainers:testcontainers:1.15.3' + automaticMigrationAcceptanceTestImplementation libs.testcontainers } // test should run using the current version of the docker compose configuration. diff --git a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AcceptanceTests.java b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AcceptanceTests.java index 6f18ec170c8f3..fbee095ff3abb 100644 --- a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AcceptanceTests.java +++ b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AcceptanceTests.java @@ -80,8 +80,8 @@ import io.airbyte.commons.util.MoreProperties; import io.airbyte.container_orchestrator.ContainerOrchestratorApp; import io.airbyte.db.Database; -import io.airbyte.db.Databases; import io.airbyte.test.airbyte_test_container.AirbyteTestContainer; +import io.airbyte.test.utils.DatabaseConnectionHelper; import io.airbyte.test.utils.PostgreSQLContainerHelper; import io.airbyte.workers.temporal.TemporalUtils; import io.airbyte.workers.temporal.scheduling.ConnectionManagerWorkflow; @@ -117,6 +117,7 @@ import org.jooq.JSONB; import org.jooq.Record; import org.jooq.Result; +import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -596,7 +597,6 @@ public void testIncrementalSync() throws Exception { // full refreshing, this record will appear in the output and cause the test to fail. if we are, // correctly, doing incremental, we will not find this value in the destination. 
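The acceptance-test hunks that follow swap calls to the old Databases factory for explicit DSLContext construction. A condensed sketch of the two variants in play, one Testcontainers-backed for tests and one credential-based for fixed endpoints such as GKEPostgresConfig; the class and method names here are illustrative:

    import io.airbyte.db.Database;
    import io.airbyte.db.factory.DSLContextFactory;
    import io.airbyte.db.factory.DatabaseDriver;
    import io.airbyte.test.utils.DatabaseConnectionHelper;
    import org.jooq.SQLDialect;
    import org.testcontainers.containers.PostgreSQLContainer;

    class ConnectionSketch {

      // Test variant: derive credentials, driver and URL from a running Testcontainers instance.
      static Database fromContainer(final PostgreSQLContainer<?> db) {
        return new Database(DatabaseConnectionHelper.createDslContext(db, SQLDialect.POSTGRES));
      }

      // Fixed-endpoint variant: spell out the connection details explicitly.
      static Database fromCredentials(final String user, final String password, final String jdbcUrl) {
        return new Database(DSLContextFactory.create(user, password,
            DatabaseDriver.POSTGRESQL.getDriverClassName(), jdbcUrl, SQLDialect.POSTGRES));
      }

    }
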
source.query(ctx -> ctx.execute("UPDATE id_and_name SET name='yennefer' WHERE id=2")); - source.close(); LOGGER.info("Starting testIncrementalSync() sync 2"); final JobInfoRead connectionSyncRead2 = apiClient.getConnectionApi() @@ -747,7 +747,6 @@ public void testIncrementalDedupeSync() throws Exception { // retrieve latest snapshot of source records after modifications; the deduplicated table in // destination should mirror this latest state of records final List expectedNormalizedRecords = retrieveSourceRecords(source, STREAM_NAME); - source.close(); final JobInfoRead connectionSyncRead2 = apiClient.getConnectionApi() .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); @@ -1332,7 +1331,7 @@ private Database getDestinationDatabase() { } private Database getDatabase(final PostgreSQLContainer db) { - return Databases.createPostgresDatabase(db.getUsername(), db.getPassword(), db.getJdbcUrl()); + return new Database(DatabaseConnectionHelper.createDslContext(db, SQLDialect.POSTGRES)); } private Set listAllTables(final Database database) throws SQLException { diff --git a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/GKEPostgresConfig.java b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/GKEPostgresConfig.java index 123a0b7b93245..5ec03a0c3068b 100644 --- a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/GKEPostgresConfig.java +++ b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/GKEPostgresConfig.java @@ -5,10 +5,12 @@ package io.airbyte.test.acceptance; import io.airbyte.db.Database; -import io.airbyte.db.Databases; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.test.acceptance.AcceptanceTests.Type; import java.util.HashMap; import java.util.Map; +import org.jooq.SQLDialect; /** * This class is used to provide information related to the test databases for running the @@ -43,11 +45,13 @@ public static Map dbConfig(final Type connectorType, final boole } public static Database getSourceDatabase() { - return Databases.createPostgresDatabase(USERNAME, PASSWORD, "jdbc:postgresql://localhost:2000/postgresdb"); + return new Database(DSLContextFactory.create(USERNAME, PASSWORD, DatabaseDriver.POSTGRESQL.getDriverClassName(), + "jdbc:postgresql://localhost:2000/postgresdb", SQLDialect.POSTGRES)); } public static Database getDestinationDatabase() { - return Databases.createPostgresDatabase(USERNAME, PASSWORD, "jdbc:postgresql://localhost:3000/postgresdb"); + return new Database(DSLContextFactory.create(USERNAME, PASSWORD, DatabaseDriver.POSTGRESQL.getDriverClassName(), + "jdbc:postgresql://localhost:3000/postgresdb", SQLDialect.POSTGRES)); } } diff --git a/airbyte-workers/build.gradle b/airbyte-workers/build.gradle index 3830e60097e3f..a85cea7958140 100644 --- a/airbyte-workers/build.gradle +++ b/airbyte-workers/build.gradle @@ -33,11 +33,11 @@ dependencies { testImplementation 'io.temporal:temporal-testing:1.8.1' testImplementation 'com.jayway.jsonpath:json-path:2.7.0' - testImplementation "org.flywaydb:flyway-core:7.14.0" + testImplementation libs.flyway.core testImplementation 'org.mockito:mockito-inline:4.0.0' - testImplementation 'org.postgresql:postgresql:42.2.18' - testImplementation 'org.testcontainers:testcontainers:1.15.3' - testImplementation 'org.testcontainers:postgresql:1.15.3' + testImplementation libs.postgresql + testImplementation libs.testcontainers + testImplementation libs.testcontainers.postgresql testImplementation 
project(':airbyte-commons-docker') testImplementation project(':airbyte-test-utils') diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java b/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java index d87ffd380b5c2..e3c17a35a43c5 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java @@ -9,6 +9,7 @@ import io.airbyte.analytics.TrackingClientSingleton; import io.airbyte.commons.features.EnvVariableFeatureFlags; import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.commons.lang.CloseableShutdownHook; import io.airbyte.config.Configs; import io.airbyte.config.Configs.WorkerEnvironment; import io.airbyte.config.EnvConfigs; @@ -22,6 +23,9 @@ import io.airbyte.config.persistence.split_secrets.SecretPersistence; import io.airbyte.config.persistence.split_secrets.SecretsHydrator; import io.airbyte.db.Database; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; import io.airbyte.db.instance.jobs.JobsDatabaseInstance; import io.airbyte.metrics.lib.DatadogClientConfiguration; @@ -80,7 +84,10 @@ import java.util.Map; import java.util.Optional; import java.util.concurrent.Executors; +import javax.sql.DataSource; import lombok.AllArgsConstructor; +import org.jooq.DSLContext; +import org.jooq.SQLDialect; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.MDC; @@ -90,6 +97,7 @@ public class WorkerApp { private static final Logger LOGGER = LoggerFactory.getLogger(WorkerApp.class); public static final int KUBE_HEARTBEAT_PORT = 9000; + private static final String DRIVER_CLASS_NAME = DatabaseDriver.POSTGRESQL.getDriverClassName(); // IMPORTANT: Changing the storage location will orphan already existing kube pods when the new // version is deployed! 
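The dense main() rewrite in the hunks below boils down to a create-share-close lifecycle for the database resources. Condensed into a sketch (the jobs-side wiring, omitted here, mirrors the configs side; variable names follow the hunks that follow):

    // Build the pooled DataSource once, share the DSLContext, and guarantee cleanup.
    final DataSource configsDataSource = DataSourceFactory.create(
        configs.getConfigDatabaseUser(),
        configs.getConfigDatabasePassword(),
        DRIVER_CLASS_NAME,
        configs.getConfigDatabaseUrl());

    try (final DSLContext configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES)) {
      // Close the pool and context even on abnormal termination.
      CloseableShutdownHook.registerRuntimeShutdownHook(configsDataSource, configsDslContext);
      launchWorkerApp(configs, configsDslContext, jobsDslContext); // jobsDslContext is built the same way
    }
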
@@ -332,9 +340,7 @@ static Optional getContainerOrchestratorConfig(fina } } - private static void launchWorkerApp() throws IOException { - final Configs configs = new EnvConfigs(); - + private static void launchWorkerApp(final Configs configs, final DSLContext configsDslContext, final DSLContext jobsDslContext) throws IOException { DogStatsDMetricSingleton.initialize(MetricEmittingApps.WORKER, new DatadogClientConfiguration(configs)); final WorkerConfigs defaultWorkerConfigs = new WorkerConfigs(configs); @@ -358,7 +364,7 @@ private static void launchWorkerApp() throws IOException { final String temporalHost = configs.getTemporalHost(); LOGGER.info("temporalHost = " + temporalHost); - final SecretsHydrator secretsHydrator = SecretPersistence.getSecretsHydrator(configs); + final SecretsHydrator secretsHydrator = SecretPersistence.getSecretsHydrator(configsDslContext, configs); if (configs.getWorkerEnvironment().equals(WorkerEnvironment.KUBERNETES)) { KubePortManagerSingleton.init(configs.getTemporalWorkerPorts()); @@ -368,11 +374,7 @@ private static void launchWorkerApp() throws IOException { TemporalUtils.configureTemporalNamespace(temporalService); - final Database configDatabase = new ConfigsDatabaseInstance( - configs.getConfigDatabaseUser(), - configs.getConfigDatabasePassword(), - configs.getConfigDatabaseUrl()) - .getInitialized(); + final Database configDatabase = new ConfigsDatabaseInstance(configsDslContext).getInitialized(); final FeatureFlags featureFlags = new EnvVariableFeatureFlags(); final JsonSecretsProcessor jsonSecretsProcessor = JsonSecretsProcessor.builder() .maskSecrets(!featureFlags.exposeSecretsInExport()) @@ -381,11 +383,7 @@ private static void launchWorkerApp() throws IOException { final ConfigPersistence configPersistence = DatabaseConfigPersistence.createWithValidation(configDatabase, jsonSecretsProcessor); final ConfigRepository configRepository = new ConfigRepository(configPersistence, configDatabase); - final Database jobDatabase = new JobsDatabaseInstance( - configs.getDatabaseUser(), - configs.getDatabasePassword(), - configs.getDatabaseUrl()) - .getInitialized(); + final Database jobDatabase = new JobsDatabaseInstance(jobsDslContext).getInitialized(); final JobPersistence jobPersistence = new DefaultJobPersistence(jobDatabase); TrackingClientSingleton.initialize( @@ -456,7 +454,22 @@ private static void launchWorkerApp() throws IOException { public static void main(final String[] args) { try { - launchWorkerApp(); + final Configs configs = new EnvConfigs(); + + final DataSource configsDataSource = DataSourceFactory.create(configs.getConfigDatabaseUser(), configs.getConfigDatabasePassword(), + DRIVER_CLASS_NAME, configs.getConfigDatabaseUrl()); + final DataSource jobsDataSource = DataSourceFactory.create(configs.getDatabaseUser(), configs.getDatabasePassword(), + DRIVER_CLASS_NAME, configs.getDatabaseUrl()); + + // Manual configuration that will be replaced by Dependency Injection in the future + try (final DSLContext configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); + final DSLContext jobsDslContext = DSLContextFactory.create(jobsDataSource, SQLDialect.POSTGRES)) { + + // Ensure that the database resources are closed on application shutdown + CloseableShutdownHook.registerRuntimeShutdownHook(configsDataSource, jobsDataSource, configsDslContext, jobsDslContext); + + launchWorkerApp(configs, configsDslContext, jobsDslContext); + } } catch (final Throwable t) { LOGGER.error("Worker app failed", t); System.exit(1); diff --git 
a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/TemporalAttemptExecutionTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/TemporalAttemptExecutionTest.java index e73171176fc89..c5157ba1cdb41 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/TemporalAttemptExecutionTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/TemporalAttemptExecutionTest.java @@ -15,16 +15,21 @@ import io.airbyte.commons.functional.CheckedSupplier; import io.airbyte.config.Configs; import io.airbyte.db.Database; +import io.airbyte.db.factory.DSLContextFactory; import io.airbyte.db.instance.test.TestDatabaseProviders; import io.airbyte.scheduler.models.JobRunConfig; import io.airbyte.scheduler.persistence.DefaultJobPersistence; import io.airbyte.scheduler.persistence.JobPersistence; +import io.airbyte.test.utils.DatabaseConnectionHelper; import io.airbyte.workers.Worker; import io.temporal.serviceclient.CheckedExceptionWrapper; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.function.Consumer; +import javax.sql.DataSource; +import org.jooq.DSLContext; +import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; @@ -42,6 +47,8 @@ class TemporalAttemptExecutionTest { private static PostgreSQLContainer container; private static Configs configs; + private static DataSource dataSource; + private static DSLContext dslContext; private Path jobRoot; @@ -57,12 +64,15 @@ static void setUpAll() { .withPassword(SOURCE_PASSWORD); container.start(); configs = mock(Configs.class); + + dataSource = DatabaseConnectionHelper.createDataSource(container); + dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); } @SuppressWarnings("unchecked") @BeforeEach void setup() throws IOException { - final TestDatabaseProviders databaseProviders = new TestDatabaseProviders(container); + final TestDatabaseProviders databaseProviders = new TestDatabaseProviders(dataSource, dslContext); final Database jobDatabase = databaseProviders.createNewJobsDatabase(); final JobPersistence jobPersistence = new DefaultJobPersistence(jobDatabase); diff --git a/deps.toml b/deps.toml index 5d637e2d3eff8..e0cf8f4249383 100644 --- a/deps.toml +++ b/deps.toml @@ -1,11 +1,16 @@ [versions] fasterxml_version = "2.13.0" +flyway = "7.14.0" glassfish_version = "2.31" +hikaricp = "5.0.1" commons_io = "2.7" log4j = "2.17.1" slf4j = "1.7.30" lombok = "1.18.22" +jooq = "3.13.4" junit-jupiter = "5.7.2" +postgresql = "42.3.4" +testcontainers = "1.17.1" [libraries] fasterxml = { module = "com.fasterxml.jackson:jackson-bom", version.ref = "fasterxml_version" } @@ -25,6 +30,27 @@ log4j-impl = { module = "org.apache.logging.log4j:log4j-slf4j-impl", version.ref log4j-web = { module = "org.apache.logging.log4j:log4j-web", version.ref = "log4j" } jul-to-slf4j = { module = "org.slf4j:jul-to-slf4j", version.ref = "slf4j" } jcl-over-slf4j = { module = "org.slf4j:jcl-over-slf4j", version.ref = "slf4j" } +hikaricp = { module = "com.zaxxer:HikariCP", version.ref = "hikaricp" } +jooq = { module = "org.jooq:jooq", version.ref = "jooq" } +jooq-codegen = { module = "org.jooq:jooq-codegen", version.ref = "jooq" } +jooq-meta = { module = "org.jooq:jooq-meta", version.ref = "jooq" } +postgresql = { module = "org.postgresql:postgresql", version.ref = "postgresql" } +flyway-core = { module = "org.flywaydb:flyway-core", version.ref = "flyway" } +testcontainers = { module = 
"org.testcontainers:testcontainers", version.ref = "testcontainers" } +testcontainers-cassandra = { module = "org.testcontainers:cassandra", version.ref = "testcontainers" } +testcontainers-clickhouse = { module = "org.testcontainers:clickhouse", version.ref = "testcontainers" } +testcontainers-cockroachdb = { module = "org.testcontainers:cockroachdb", version.ref = "testcontainers" } +testcontainers-db2 = { module = "org.testcontainers:db2", version.ref = "testcontainers" } +testcontainers-elasticsearch = { module = "org.testcontainers:elasticsearch", version.ref = "testcontainers" } +testcontainers-jdbc = { module = "org.testcontainers:jdbc", version.ref = "testcontainers" } +testcontainers-kafka = { module = "org.testcontainers:kafka", version.ref = "testcontainers" } +testcontainers-mariadb = { module = "org.testcontainers:mariadb", version.ref = "testcontainers" } +testcontainers-mongodb = { module = "org.testcontainers:mongodb", version.ref = "testcontainers" } +testcontainers-mssqlserver = { module = "org.testcontainers:mssqlserver", version.ref = "testcontainers" } +testcontainers-mysql = { module = "org.testcontainers:mysql", version.ref = "testcontainers" } +testcontainers-oracle-xe = { module = "org.testcontainers:oracle-xe", version.ref = "testcontainers" } +testcontainers-postgresql = { module = "org.testcontainers:postgresql", version.ref = "testcontainers" } +testcontainers-pulsar = { module = "org.testcontainers:pulsar", version.ref = "testcontainers" } log4j-over-slf4j = { module = "org.slf4j:log4j-over-slf4j", version.ref = "slf4j" } appender-log4j2 = { module = "com.therealvan:appender-log4j2", version = "3.6.0" } aws-java-sdk-s3 = { module = "com.amazonaws:aws-java-sdk-s3", version = "1.12.6" } From 734caece05d0073d681937ca807af36dab233b01 Mon Sep 17 00:00:00 2001 From: midavadim Date: Mon, 9 May 2022 22:37:40 +0300 Subject: [PATCH 08/55] updated releaseStage to beta (#12706) --- .../init/src/main/resources/seed/source_definitions.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 416749b0f5ff9..4ca4011de19d5 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -918,7 +918,7 @@ documentationUrl: https://docs.airbyte.io/integrations/sources/tiktok-marketing icon: tiktok.svg sourceType: api - releaseStage: alpha + releaseStage: beta - name: Trello sourceDefinitionId: 8da67652-004c-11ec-9a03-0242ac130003 dockerRepository: airbyte/source-trello From 559b6389ba6b3beaadeb3e7f6ccea7c4ee777552 Mon Sep 17 00:00:00 2001 From: Octavia Squidington III <90398440+octavia-squidington-iii@users.noreply.github.com> Date: Mon, 9 May 2022 17:12:17 -0300 Subject: [PATCH 09/55] Bump Airbyte version from 0.37.1-alpha to 0.38.0-alpha (#12707) Co-authored-by: jdpgrailsdev --- .bumpversion.cfg | 2 +- .env | 2 +- airbyte-bootloader/Dockerfile | 2 +- airbyte-container-orchestrator/Dockerfile | 2 +- airbyte-metrics/reporter/Dockerfile | 2 +- airbyte-scheduler/app/Dockerfile | 2 +- airbyte-server/Dockerfile | 2 +- airbyte-webapp/package-lock.json | 4 ++-- airbyte-webapp/package.json | 2 +- airbyte-workers/Dockerfile | 2 +- charts/airbyte/Chart.yaml | 2 +- charts/airbyte/README.md | 10 +++++----- charts/airbyte/values.yaml | 10 +++++----- docs/operator-guides/upgrading-airbyte.md | 2 +- kube/overlays/stable-with-resource-limits/.env | 
2 +- .../stable-with-resource-limits/kustomization.yaml | 12 ++++++------ kube/overlays/stable/.env | 2 +- kube/overlays/stable/kustomization.yaml | 12 ++++++------ octavia-cli/Dockerfile | 2 +- octavia-cli/README.md | 2 +- octavia-cli/install.sh | 2 +- octavia-cli/setup.py | 2 +- 22 files changed, 41 insertions(+), 41 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 2989b05fd76dd..e27cc15fb054f 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.37.1-alpha +current_version = 0.38.0-alpha commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? diff --git a/.env b/.env index 7419c1d23c533..cd01404075e79 100644 --- a/.env +++ b/.env @@ -10,7 +10,7 @@ ### SHARED ### -VERSION=0.37.1-alpha +VERSION=0.38.0-alpha # When using the airbyte-db via default docker image CONFIG_ROOT=/data diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index 014db939009ba..11b728bb68288 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim -ARG VERSION=0.37.1-alpha +ARG VERSION=0.38.0-alpha ENV APPLICATION airbyte-bootloader ENV VERSION ${VERSION} diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index 01d2c29f4f31b..bdd14adc9e4a2 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -26,7 +26,7 @@ RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] htt RUN apt-get update && apt-get install -y kubectl # Don't change this manually. Bump version expects to make moves based on this string -ARG VERSION=0.37.1-alpha +ARG VERSION=0.38.0-alpha ENV APPLICATION airbyte-container-orchestrator ENV VERSION=${VERSION} diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index ff0f2e6f9e035..5d3e153a9fe84 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS metrics-reporter -ARG VERSION=0.37.1-alpha +ARG VERSION=0.38.0-alpha ENV APPLICATION airbyte-metrics-reporter ENV VERSION ${VERSION} diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile index e7ec7006c7099..6acd8cdba6b95 100644 --- a/airbyte-scheduler/app/Dockerfile +++ b/airbyte-scheduler/app/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS scheduler -ARG VERSION=0.37.1-alpha +ARG VERSION=0.38.0-alpha ENV APPLICATION airbyte-scheduler ENV VERSION ${VERSION} diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index ded7fcf0492ea..3d7b0ad520f3d 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -3,7 +3,7 @@ FROM openjdk:${JDK_VERSION}-slim AS server EXPOSE 8000 -ARG VERSION=0.37.1-alpha +ARG VERSION=0.38.0-alpha ENV APPLICATION airbyte-server ENV VERSION ${VERSION} diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index bc56a01f14f4b..9d22c4fb39344 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.37.1-alpha", + "version": "0.38.0-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.37.1-alpha", + "version": "0.38.0-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^6.1.1", 
"@fortawesome/free-brands-svg-icons": "^6.1.1", diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index 06352b6f969b8..f868275c6f13e 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.37.1-alpha", + "version": "0.38.0-alpha", "private": true, "engines": { "node": ">=16.0.0" diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile index 5fa2a01b23084..0925e82d50e4a 100644 --- a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -25,7 +25,7 @@ RUN curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packa RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list RUN apt-get update && apt-get install -y kubectl -ARG VERSION=0.37.1-alpha +ARG VERSION=0.38.0-alpha ENV APPLICATION airbyte-workers ENV VERSION ${VERSION} diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index 1df7f1b1568f5..fab282b1d580b 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -21,7 +21,7 @@ version: 0.3.2 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "0.37.1-alpha" +appVersion: "0.38.0-alpha" dependencies: - name: common diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index 3f9bfef69a996..9e18842a84c6e 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -30,7 +30,7 @@ Helm charts for Airbyte. | `webapp.replicaCount` | Number of webapp replicas | `1` | | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` | | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` | -| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.37.1-alpha` | +| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.38.0-alpha` | | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` | | `webapp.containerSecurityContext` | Security context for the container | `{}` | | `webapp.livenessProbe.enabled` | Enable livenessProbe on the webapp | `true` | @@ -72,7 +72,7 @@ Helm charts for Airbyte. | `scheduler.replicaCount` | Number of scheduler replicas | `1` | | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` | | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | -| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.37.1-alpha` | +| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.38.0-alpha` | | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | | `scheduler.containerSecurityContext` | Security context for the container | `{}` | | `scheduler.livenessProbe.enabled` | Enable livenessProbe on the scheduler | `true` | @@ -135,7 +135,7 @@ Helm charts for Airbyte. | `server.replicaCount` | Number of server replicas | `1` | | `server.image.repository` | The repository to use for the airbyte server image. 
| `airbyte/server` | | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` | -| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.37.1-alpha` | +| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.38.0-alpha` | | `server.podAnnotations` | Add extra annotations to the server pod | `{}` | | `server.containerSecurityContext` | Security context for the container | `{}` | | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` | @@ -170,7 +170,7 @@ Helm charts for Airbyte. | `worker.replicaCount` | Number of worker replicas | `1` | | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` | | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` | -| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.37.1-alpha` | +| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.38.0-alpha` | | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` | | `worker.containerSecurityContext` | Security context for the container | `{}` | | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` | @@ -202,7 +202,7 @@ Helm charts for Airbyte. | ----------------------------- | -------------------------------------------------------------------- | -------------------- | | `bootloader.image.repository` | The repository to use for the airbyte bootloader image. | `airbyte/bootloader` | | `bootloader.image.pullPolicy` | the pull policy to use for the airbyte bootloader image | `IfNotPresent` | -| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.37.1-alpha` | +| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.38.0-alpha` | | `bootloader.podAnnotations` | Add extra annotations to the bootloader pod | `{}` | | `bootloader.nodeSelector` | Node labels for pod assignment | `{}` | | `bootloader.tolerations` | Tolerations for worker pod assignment. 
| `[]` | diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index 19ba227aad67e..21cbd5baaf892 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -41,7 +41,7 @@ webapp: image: repository: airbyte/webapp pullPolicy: IfNotPresent - tag: 0.37.1-alpha + tag: 0.38.0-alpha ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s) ## @@ -207,7 +207,7 @@ scheduler: image: repository: airbyte/scheduler pullPolicy: IfNotPresent - tag: 0.37.1-alpha + tag: 0.38.0-alpha ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod ## @@ -438,7 +438,7 @@ server: image: repository: airbyte/server pullPolicy: IfNotPresent - tag: 0.37.1-alpha + tag: 0.38.0-alpha ## @param server.podAnnotations [object] Add extra annotations to the server pod ## @@ -565,7 +565,7 @@ worker: image: repository: airbyte/worker pullPolicy: IfNotPresent - tag: 0.37.1-alpha + tag: 0.38.0-alpha ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s) ## @@ -683,7 +683,7 @@ bootloader: image: repository: airbyte/bootloader pullPolicy: IfNotPresent - tag: 0.37.1-alpha + tag: 0.38.0-alpha ## @param bootloader.podAnnotations [object] Add extra annotations to the bootloader pod ## diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md index 40f623d854e56..081d2f20afcb5 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/operator-guides/upgrading-airbyte.md @@ -103,7 +103,7 @@ If you are upgrading from (i.e. your current version of Airbyte is) Airbyte vers Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`. ```bash - docker run --rm -v /tmp:/config airbyte/migration:0.37.1-alpha --\ + docker run --rm -v /tmp:/config airbyte/migration:0.38.0-alpha --\ --input /config/airbyte_archive.tar.gz\ --output /config/airbyte_archive_migrated.tar.gz ``` diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env index 7f9beea2321fe..9d3d78ea61dbf 100644 --- a/kube/overlays/stable-with-resource-limits/.env +++ b/kube/overlays/stable-with-resource-limits/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.37.1-alpha +AIRBYTE_VERSION=0.38.0-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml index 89e66d5d5bc14..71ad4ddec69b6 100644 --- a/kube/overlays/stable-with-resource-limits/kustomization.yaml +++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.37.1-alpha + newTag: 0.38.0-alpha - name: airbyte/bootloader - newTag: 0.37.1-alpha + newTag: 0.38.0-alpha - name: airbyte/scheduler - newTag: 0.37.1-alpha + newTag: 0.38.0-alpha - name: airbyte/server - newTag: 0.37.1-alpha + newTag: 0.38.0-alpha - name: airbyte/webapp - newTag: 0.37.1-alpha + newTag: 0.38.0-alpha - name: airbyte/worker - newTag: 0.37.1-alpha + newTag: 0.38.0-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env index a84d36649409b..4ac1e1f2282cf 100644 --- a/kube/overlays/stable/.env +++ b/kube/overlays/stable/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.37.1-alpha +AIRBYTE_VERSION=0.38.0-alpha # Airbyte Internal Database, see 
https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml index d512effa07a14..b7d826d7927f4 100644 --- a/kube/overlays/stable/kustomization.yaml +++ b/kube/overlays/stable/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.37.1-alpha + newTag: 0.38.0-alpha - name: airbyte/bootloader - newTag: 0.37.1-alpha + newTag: 0.38.0-alpha - name: airbyte/scheduler - newTag: 0.37.1-alpha + newTag: 0.38.0-alpha - name: airbyte/server - newTag: 0.37.1-alpha + newTag: 0.38.0-alpha - name: airbyte/webapp - newTag: 0.37.1-alpha + newTag: 0.38.0-alpha - name: airbyte/worker - newTag: 0.37.1-alpha + newTag: 0.38.0-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/octavia-cli/Dockerfile b/octavia-cli/Dockerfile index 30f7fae5c4367..d94558cafb812 100644 --- a/octavia-cli/Dockerfile +++ b/octavia-cli/Dockerfile @@ -14,5 +14,5 @@ USER octavia-cli WORKDIR /home/octavia-project ENTRYPOINT ["octavia"] -LABEL io.airbyte.version=0.37.1-alpha +LABEL io.airbyte.version=0.38.0-alpha LABEL io.airbyte.name=airbyte/octavia-cli diff --git a/octavia-cli/README.md b/octavia-cli/README.md index ecb5fdd3ca544..5431288886d59 100644 --- a/octavia-cli/README.md +++ b/octavia-cli/README.md @@ -105,7 +105,7 @@ This script: ```bash touch ~/.octavia # Create a file to store env variables that will be mapped the octavia-cli container mkdir my_octavia_project_directory # Create your octavia project directory where YAML configurations will be stored. -docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.37.1-alpha +docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.38.0-alpha ``` ### Using `docker-compose` diff --git a/octavia-cli/install.sh b/octavia-cli/install.sh index 3a80c50568e8b..3ade2bcef14f4 100755 --- a/octavia-cli/install.sh +++ b/octavia-cli/install.sh @@ -3,7 +3,7 @@ # This install scripts currently only works for ZSH and Bash profiles. # It creates an octavia alias in your profile bound to a docker run command and your current user. 
-VERSION=0.37.1-alpha +VERSION=0.38.0-alpha OCTAVIA_ENV_FILE=${HOME}/.octavia detect_profile() { diff --git a/octavia-cli/setup.py b/octavia-cli/setup.py index 1955ad7271293..ebeafe6e3b0f8 100644 --- a/octavia-cli/setup.py +++ b/octavia-cli/setup.py @@ -15,7 +15,7 @@ setup( name="octavia-cli", - version="0.37.1", + version="0.38.0", description="A command line interface to manage Airbyte configurations", long_description=README, author="Airbyte", From 02dec04b74d7b08791be3d43ee701bb32b72ec92 Mon Sep 17 00:00:00 2001 From: Anne <102554163+alovew@users.noreply.github.com> Date: Mon, 9 May 2022 15:53:19 -0700 Subject: [PATCH 10/55] Add schema validation to replication worker (#12231) * Add record schema validation to replication worker Co-authored-by: Lake Mossman --- .../ReplicationJobOrchestrator.java | 4 +- .../workers/DefaultReplicationWorker.java | 18 +++- .../RecordSchemaValidationException.java | 22 +++++ .../workers/RecordSchemaValidator.java | 56 +++++++++++++ .../java/io/airbyte/workers/WorkerUtils.java | 17 ++++ .../sync/ReplicationActivityImpl.java | 5 +- .../workers/DefaultReplicationWorkerTest.java | 82 +++++++++++++++---- .../workers/RecordSchemaValidatorTest.java | 43 ++++++++++ .../io/airbyte/workers/TestConfigHelpers.java | 22 ++++- .../io/airbyte/workers/WorkerUtilsTest.java | 23 ++++++ .../airbyte/AirbyteMessageUtils.java | 8 +- 11 files changed, 273 insertions(+), 27 deletions(-) create mode 100644 airbyte-workers/src/main/java/io/airbyte/workers/RecordSchemaValidationException.java create mode 100644 airbyte-workers/src/main/java/io/airbyte/workers/RecordSchemaValidator.java create mode 100644 airbyte-workers/src/test/java/io/airbyte/workers/RecordSchemaValidatorTest.java diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/ReplicationJobOrchestrator.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/ReplicationJobOrchestrator.java index 476df67e68e3e..c154996dff307 100644 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/ReplicationJobOrchestrator.java +++ b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/ReplicationJobOrchestrator.java @@ -11,6 +11,7 @@ import io.airbyte.scheduler.models.IntegrationLauncherConfig; import io.airbyte.scheduler.models.JobRunConfig; import io.airbyte.workers.DefaultReplicationWorker; +import io.airbyte.workers.RecordSchemaValidator; import io.airbyte.workers.ReplicationWorker; import io.airbyte.workers.WorkerConfigs; import io.airbyte.workers.WorkerConstants; @@ -95,7 +96,8 @@ public Optional runJob() throws Exception { airbyteSource, new NamespacingMapper(syncInput.getNamespaceDefinition(), syncInput.getNamespaceFormat(), syncInput.getPrefix()), new DefaultAirbyteDestination(workerConfigs, destinationLauncher), - new AirbyteMessageTracker()); + new AirbyteMessageTracker(), + new RecordSchemaValidator(WorkerUtils.mapStreamNamesToSchemas(syncInput))); log.info("Running replication worker..."); final Path jobRoot = WorkerUtils.getJobRoot(configs.getWorkspaceRoot(), jobRunConfig.getJobId(), jobRunConfig.getAttemptId()); diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/DefaultReplicationWorker.java b/airbyte-workers/src/main/java/io/airbyte/workers/DefaultReplicationWorker.java index 5d44735651c94..c48b686df6bd6 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/DefaultReplicationWorker.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/DefaultReplicationWorker.java @@ 
-68,13 +68,15 @@ public class DefaultReplicationWorker implements ReplicationWorker { private final ExecutorService executors; private final AtomicBoolean cancelled; private final AtomicBoolean hasFailed; + private final RecordSchemaValidator recordSchemaValidator; public DefaultReplicationWorker(final String jobId, final int attempt, final AirbyteSource source, final AirbyteMapper mapper, final AirbyteDestination destination, - final MessageTracker messageTracker) { + final MessageTracker messageTracker, + final RecordSchemaValidator recordSchemaValidator) { this.jobId = jobId; this.attempt = attempt; this.source = source; @@ -82,6 +84,7 @@ public DefaultReplicationWorker(final String jobId, this.destination = destination; this.messageTracker = messageTracker; this.executors = Executors.newFixedThreadPool(2); + this.recordSchemaValidator = recordSchemaValidator; this.cancelled = new AtomicBoolean(false); this.hasFailed = new AtomicBoolean(false); @@ -142,7 +145,7 @@ public ReplicationOutput run(final StandardSyncInput syncInput, final Path jobRo }); final CompletableFuture replicationThreadFuture = CompletableFuture.runAsync( - getReplicationRunnable(source, destination, cancelled, mapper, messageTracker, mdc), + getReplicationRunnable(source, destination, cancelled, mapper, messageTracker, mdc, recordSchemaValidator), executors).whenComplete((msg, ex) -> { if (ex != null) { if (ex.getCause() instanceof SourceException) { @@ -273,7 +276,8 @@ private static Runnable getReplicationRunnable(final AirbyteSource source, final AtomicBoolean cancelled, final AirbyteMapper mapper, final MessageTracker messageTracker, - final Map mdc) { + final Map mdc, + final RecordSchemaValidator recordSchemaValidator) { return () -> { MDC.setContextMap(mdc); LOGGER.info("Replication thread started."); @@ -287,6 +291,14 @@ private static Runnable getReplicationRunnable(final AirbyteSource source, throw new SourceException("Source process read attempt failed", e); } if (messageOptional.isPresent()) { + if (messageOptional.get().getRecord() != null) { + try { + recordSchemaValidator.validateSchema(messageOptional.get().getRecord()); + } catch (final RecordSchemaValidationException e) { + LOGGER.warn(e.getMessage()); + } + } + final AirbyteMessage message = mapper.mapMessage(messageOptional.get()); messageTracker.acceptFromSource(message); diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/RecordSchemaValidationException.java b/airbyte-workers/src/main/java/io/airbyte/workers/RecordSchemaValidationException.java new file mode 100644 index 0000000000000..814eb8fdd5e85 --- /dev/null +++ b/airbyte-workers/src/main/java/io/airbyte/workers/RecordSchemaValidationException.java @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */
+
+package io.airbyte.workers;
+
+/**
+ * Exception thrown by the RecordSchemaValidator during a sync when AirbyteRecordMessage data does
+ * not conform to its stream's defined JSON schema
+ */
+
+public class RecordSchemaValidationException extends Exception {
+
+  public RecordSchemaValidationException(final String message) {
+    super(message);
+  }
+
+  public RecordSchemaValidationException(final String message, final Throwable cause) {
+    super(message, cause);
+  }
+
+}
diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/RecordSchemaValidator.java b/airbyte-workers/src/main/java/io/airbyte/workers/RecordSchemaValidator.java
new file mode 100644
index 0000000000000..75368be3aa6e7
--- /dev/null
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/RecordSchemaValidator.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.workers;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import io.airbyte.protocol.models.AirbyteRecordMessage;
+import io.airbyte.validation.json.JsonSchemaValidator;
+import io.airbyte.validation.json.JsonValidationException;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * Validates that AirbyteRecordMessage data conforms to the JSON schema defined by the source's
+ * configured catalog
+ */
+
+public class RecordSchemaValidator {
+
+  private final Map<String, JsonNode> streams;
+
+  public RecordSchemaValidator(final Map<String, JsonNode> streamNamesToSchemas) {
+    // streams is a Map of stream source namespace + name mapped to the stream schema
+    // for easy access when we check each record's schema
+    this.streams = streamNamesToSchemas;
+  }
+
+  /**
+   * Takes an AirbyteRecordMessage and uses the JsonSchemaValidator to validate that its data conforms
+   * to the stream's schema. If it does not, this method throws a RecordSchemaValidationException.
+   *
+   * @param message
+   * @throws RecordSchemaValidationException
+   */
+  public void validateSchema(final AirbyteRecordMessage message) throws RecordSchemaValidationException {
+    // the stream this message corresponds to, including the stream namespace
+    final String messageStream = String.format("%s" + message.getStream(), Objects.toString(message.getNamespace(), ""));
+    final JsonNode messageData = message.getData();
+    final JsonNode matchingSchema = streams.get(messageStream);
+
+    final JsonSchemaValidator validator = new JsonSchemaValidator();
+
+    // We must choose a JSON validator version for validating the schema
+    // Rather than allowing connectors to use any version, we enforce validation using V7
+    ((ObjectNode) matchingSchema).put("$schema", "http://json-schema.org/draft-07/schema#");
+
+    try {
+      validator.ensure(matchingSchema, messageData);
+    } catch (final JsonValidationException e) {
+      throw new RecordSchemaValidationException(String.format("Record schema validation failed. Errors: %s", e.getMessage()), e);
+    }
+  }
+
+}
diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/WorkerUtils.java b/airbyte-workers/src/main/java/io/airbyte/workers/WorkerUtils.java
index 6aead4f4f37e2..40f4ca963456f 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/WorkerUtils.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/WorkerUtils.java
@@ -4,6 +4,7 @@
 
 package io.airbyte.workers;
 
+import com.fasterxml.jackson.databind.JsonNode;
 import io.airbyte.config.Configs.WorkerEnvironment;
 import io.airbyte.config.StandardSyncInput;
 import io.airbyte.config.WorkerDestinationConfig;
@@ -13,7 +14,10 @@
 import java.nio.file.Path;
 import java.time.Duration;
 import java.time.temporal.ChronoUnit;
+import java.util.Map;
+import java.util.Objects;
 import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -95,6 +99,19 @@ public static WorkerDestinationConfig syncToWorkerDestinationConfig(final Standa
         .withState(sync.getState());
   }
 
+  public static Map<String, JsonNode> mapStreamNamesToSchemas(final StandardSyncInput syncInput) {
+    final String streamPrefix = syncInput.getPrefix();
+    return syncInput.getCatalog().getStreams().stream().collect(
+        Collectors.toMap(
+            k -> {
+              final String namespace = Objects.toString(k.getStream().getNamespace(), "").trim();
+              final String name = k.getStream().getName().trim();
+              return namespace + name;
+            },
+            v -> v.getStream().getJsonSchema()));
+
+  }
+
   // todo (cgardens) - there are 2 sources of truth for job path. we need to reduce this down to one,
   // once we are fully on temporal.
   public static Path getJobRoot(final Path workspaceRoot, final JobRunConfig jobRunConfig) {
diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationActivityImpl.java
index 33f063bb781ab..5180c620c7ec8 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationActivityImpl.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationActivityImpl.java
@@ -21,11 +21,13 @@
 import io.airbyte.scheduler.models.IntegrationLauncherConfig;
 import io.airbyte.scheduler.models.JobRunConfig;
 import io.airbyte.scheduler.persistence.JobPersistence;
 import io.airbyte.workers.DefaultReplicationWorker;
+import io.airbyte.workers.RecordSchemaValidator;
 import io.airbyte.workers.Worker;
 import io.airbyte.workers.WorkerApp;
 import io.airbyte.workers.WorkerApp.ContainerOrchestratorConfig;
 import io.airbyte.workers.WorkerConfigs;
 import io.airbyte.workers.WorkerConstants;
+import io.airbyte.workers.WorkerUtils;
 import io.airbyte.workers.process.AirbyteIntegrationLauncher;
 import io.airbyte.workers.process.IntegrationLauncher;
 import io.airbyte.workers.process.ProcessFactory;
@@ -210,7 +212,8 @@ private CheckedSupplier<Worker<StandardSyncInput, ReplicationOutput>, Exception>
         airbyteSource,
         new NamespacingMapper(syncInput.getNamespaceDefinition(), syncInput.getNamespaceFormat(), syncInput.getPrefix()),
         new DefaultAirbyteDestination(workerConfigs, destinationLauncher),
-        new AirbyteMessageTracker());
+        new AirbyteMessageTracker(),
+        new RecordSchemaValidator(WorkerUtils.mapStreamNamesToSchemas(syncInput)));
   };
 }
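Taken together, the production-side pieces above compose per record roughly like this (a condensed sketch; syncInput, message and LOGGER stand in for the worker's actual state):

    // Built once per replication, from the sync input's configured catalog.
    final RecordSchemaValidator recordSchemaValidator =
        new RecordSchemaValidator(WorkerUtils.mapStreamNamesToSchemas(syncInput));

    // Applied to every record read from the source; failures are logged, not fatal to the sync.
    if (message.getRecord() != null) {
      try {
        recordSchemaValidator.validateSchema(message.getRecord());
      } catch (final RecordSchemaValidationException e) {
        LOGGER.warn(e.getMessage());
      }
    }

diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/DefaultReplicationWorkerTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/DefaultReplicationWorkerTest.java
index 051db77017984..0688b462d75e3 100644
--- a/airbyte-workers/src/test/java/io/airbyte/workers/DefaultReplicationWorkerTest.java
+++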
b/airbyte-workers/src/test/java/io/airbyte/workers/DefaultReplicationWorkerTest.java @@ -75,6 +75,7 @@ class DefaultReplicationWorkerTest { private static final String FIELD_NAME = "favorite_color"; private static final AirbyteMessage RECORD_MESSAGE1 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "blue"); private static final AirbyteMessage RECORD_MESSAGE2 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "yellow"); + private static final AirbyteMessage RECORD_MESSAGE3 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, 3); private static final AirbyteMessage STATE_MESSAGE = AirbyteMessageUtils.createStateMessage("checkpoint", "1"); private Path jobRoot; @@ -85,6 +86,7 @@ class DefaultReplicationWorkerTest { private WorkerSourceConfig sourceConfig; private WorkerDestinationConfig destinationConfig; private AirbyteMessageTracker messageTracker; + private RecordSchemaValidator recordSchemaValidator; @SuppressWarnings("unchecked") @BeforeEach @@ -103,6 +105,7 @@ void setup() throws Exception { mapper = mock(NamespacingMapper.class); destination = mock(AirbyteDestination.class); messageTracker = mock(AirbyteMessageTracker.class); + recordSchemaValidator = mock(RecordSchemaValidator.class); when(source.isFinished()).thenReturn(false, false, false, true); when(destination.isFinished()).thenReturn(false, false, false, true); @@ -111,6 +114,7 @@ void setup() throws Exception { when(mapper.mapCatalog(destinationConfig.getCatalog())).thenReturn(destinationConfig.getCatalog()); when(mapper.mapMessage(RECORD_MESSAGE1)).thenReturn(RECORD_MESSAGE1); when(mapper.mapMessage(RECORD_MESSAGE2)).thenReturn(RECORD_MESSAGE2); + when(mapper.mapMessage(RECORD_MESSAGE3)).thenReturn(RECORD_MESSAGE3); } @AfterEach @@ -126,7 +130,8 @@ void test() throws Exception { source, mapper, destination, - messageTracker); + messageTracker, + recordSchemaValidator); worker.run(syncInput, jobRoot); @@ -136,11 +141,13 @@ void test() throws Exception { verify(destination).accept(RECORD_MESSAGE2); verify(source, atLeastOnce()).close(); verify(destination).close(); + verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE1.getRecord()); + verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE2.getRecord()); } @Test - void testSourceNonZeroExitValue() throws Exception { - when(source.getExitValue()).thenReturn(1); + void testInvalidSchema() throws Exception { + when(source.attemptRead()).thenReturn(Optional.of(RECORD_MESSAGE1), Optional.of(RECORD_MESSAGE2), Optional.of(RECORD_MESSAGE3)); final ReplicationWorker worker = new DefaultReplicationWorker( JOB_ID, @@ -148,8 +155,33 @@ void testSourceNonZeroExitValue() throws Exception { source, mapper, destination, - messageTracker); + messageTracker, + recordSchemaValidator); + + worker.run(syncInput, jobRoot); + + verify(source).start(sourceConfig, jobRoot); + verify(destination).start(destinationConfig, jobRoot); + verify(destination).accept(RECORD_MESSAGE1); + verify(destination).accept(RECORD_MESSAGE2); + verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE1.getRecord()); + verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE2.getRecord()); + verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE3.getRecord()); + verify(source).close(); + verify(destination).close(); + } + @Test + void testSourceNonZeroExitValue() throws Exception { + when(source.getExitValue()).thenReturn(1); + final ReplicationWorker worker = new DefaultReplicationWorker( + JOB_ID, + JOB_ATTEMPT, + source, + mapper, + destination, + 
messageTracker, + recordSchemaValidator); final ReplicationOutput output = worker.run(syncInput, jobRoot); assertEquals(ReplicationStatus.FAILED, output.getReplicationAttemptSummary().getStatus()); assertTrue(output.getFailures().stream().anyMatch(f -> f.getFailureOrigin().equals(FailureOrigin.SOURCE))); @@ -167,7 +199,8 @@ void testReplicationRunnableSourceFailure() throws Exception { source, mapper, destination, - messageTracker); + messageTracker, + recordSchemaValidator); final ReplicationOutput output = worker.run(syncInput, jobRoot); assertEquals(ReplicationStatus.FAILED, output.getReplicationAttemptSummary().getStatus()); @@ -187,7 +220,8 @@ void testReplicationRunnableDestinationFailure() throws Exception { source, mapper, destination, - messageTracker); + messageTracker, + recordSchemaValidator); final ReplicationOutput output = worker.run(syncInput, jobRoot); assertEquals(ReplicationStatus.FAILED, output.getReplicationAttemptSummary().getStatus()); @@ -207,7 +241,8 @@ void testReplicationRunnableWorkerFailure() throws Exception { source, mapper, destination, - messageTracker); + messageTracker, + recordSchemaValidator); final ReplicationOutput output = worker.run(syncInput, jobRoot); assertEquals(ReplicationStatus.FAILED, output.getReplicationAttemptSummary().getStatus()); @@ -225,7 +260,8 @@ void testDestinationNonZeroExitValue() throws Exception { source, mapper, destination, - messageTracker); + messageTracker, + recordSchemaValidator); final ReplicationOutput output = worker.run(syncInput, jobRoot); assertEquals(ReplicationStatus.FAILED, output.getReplicationAttemptSummary().getStatus()); @@ -244,7 +280,8 @@ void testDestinationRunnableDestinationFailure() throws Exception { source, mapper, destination, - messageTracker); + messageTracker, + recordSchemaValidator); final ReplicationOutput output = worker.run(syncInput, jobRoot); assertEquals(ReplicationStatus.FAILED, output.getReplicationAttemptSummary().getStatus()); @@ -264,7 +301,8 @@ void testDestinationRunnableWorkerFailure() throws Exception { source, mapper, destination, - messageTracker); + messageTracker, + recordSchemaValidator); final ReplicationOutput output = worker.run(syncInput, jobRoot); assertEquals(ReplicationStatus.FAILED, output.getReplicationAttemptSummary().getStatus()); @@ -285,7 +323,8 @@ void testLoggingInThreads() throws IOException, WorkerException { source, mapper, destination, - messageTracker); + messageTracker, + recordSchemaValidator); worker.run(syncInput, jobRoot); @@ -324,7 +363,8 @@ void testCancellation() throws InterruptedException { source, mapper, destination, - messageTracker); + messageTracker, + recordSchemaValidator); final Thread workerThread = new Thread(() -> { try { @@ -364,7 +404,8 @@ void testPopulatesOutputOnSuccess() throws WorkerException { source, mapper, destination, - messageTracker); + messageTracker, + recordSchemaValidator); final ReplicationOutput actual = worker.run(syncInput, jobRoot); final ReplicationOutput replicationOutput = new ReplicationOutput() @@ -416,7 +457,8 @@ void testPopulatesStateOnFailureIfAvailable() throws Exception { source, mapper, destination, - messageTracker); + messageTracker, + recordSchemaValidator); final ReplicationOutput actual = worker.run(syncInput, jobRoot); assertNotNull(actual); @@ -433,7 +475,8 @@ void testRetainsStateOnFailureIfNewStateNotAvailable() throws Exception { source, mapper, destination, - messageTracker); + messageTracker, + recordSchemaValidator); final ReplicationOutput actual = worker.run(syncInput, jobRoot); @@ 
-458,7 +501,8 @@ void testPopulatesStatsOnFailureIfAvailable() throws Exception { source, mapper, destination, - messageTracker); + messageTracker, + recordSchemaValidator); final ReplicationOutput actual = worker.run(syncInput, jobRoot); final SyncStats expectedTotalStats = new SyncStats() @@ -493,7 +537,8 @@ void testDoesNotPopulatesStateOnFailureIfNotAvailable() throws Exception { source, mapper, destination, - messageTracker); + messageTracker, + recordSchemaValidator); final ReplicationOutput actual = worker.run(syncInputWithoutState, jobRoot); @@ -511,7 +556,8 @@ void testDoesNotPopulateOnIrrecoverableFailure() { source, mapper, destination, - messageTracker); + messageTracker, + recordSchemaValidator); assertThrows(WorkerException.class, () -> worker.run(syncInput, jobRoot)); } diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/RecordSchemaValidatorTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/RecordSchemaValidatorTest.java new file mode 100644 index 0000000000000..7b8566e17695a --- /dev/null +++ b/airbyte-workers/src/test/java/io/airbyte/workers/RecordSchemaValidatorTest.java @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.workers; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +import io.airbyte.config.StandardSync; +import io.airbyte.config.StandardSyncInput; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.workers.protocols.airbyte.AirbyteMessageUtils; +import org.apache.commons.lang3.tuple.ImmutablePair; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class RecordSchemaValidatorTest { + + private StandardSyncInput syncInput; + private static final String STREAM_NAME = "user_preferences"; + private static final String FIELD_NAME = "favorite_color"; + private static final AirbyteMessage VALID_RECORD = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "blue"); + private static final AirbyteMessage INVALID_RECORD = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, 3); + + @BeforeEach + void setup() throws Exception { + final ImmutablePair syncPair = TestConfigHelpers.createSyncConfig(); + syncInput = syncPair.getValue(); + } + + @Test + void testValidateValidSchema() throws Exception { + final RecordSchemaValidator recordSchemaValidator = new RecordSchemaValidator(WorkerUtils.mapStreamNamesToSchemas(syncInput)); + recordSchemaValidator.validateSchema(VALID_RECORD.getRecord()); + } + + @Test + void testValidateInvalidSchema() throws Exception { + final RecordSchemaValidator recordSchemaValidator = new RecordSchemaValidator(WorkerUtils.mapStreamNamesToSchemas(syncInput)); + assertThrows(RecordSchemaValidationException.class, () -> recordSchemaValidator.validateSchema(INVALID_RECORD.getRecord())); + } + +} diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/TestConfigHelpers.java b/airbyte-workers/src/test/java/io/airbyte/workers/TestConfigHelpers.java index 6503c614db652..a9730dbfdd033 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/TestConfigHelpers.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/TestConfigHelpers.java @@ -37,6 +37,10 @@ public class TestConfigHelpers { private static final long LAST_SYNC_TIME = 1598565106; public static ImmutablePair createSyncConfig() { + return createSyncConfig(false); + } + + public static ImmutablePair createSyncConfig(final Boolean multipleNamespaces) { final UUID workspaceId = UUID.randomUUID(); final UUID 
sourceDefinitionId = UUID.randomUUID(); final UUID sourceId = UUID.randomUUID(); @@ -90,9 +94,21 @@ public static ImmutablePair createSyncConfig() .withGitRepoBranch("git url")) .withTombstone(false); - final ConfiguredAirbyteStream stream = new ConfiguredAirbyteStream() - .withStream(CatalogHelpers.createAirbyteStream(STREAM_NAME, Field.of(FIELD_NAME, JsonSchemaType.STRING))); - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(Collections.singletonList(stream)); + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog(); + if (multipleNamespaces) { + final ConfiguredAirbyteStream streamOne = new ConfiguredAirbyteStream() + .withStream(CatalogHelpers.createAirbyteStream(STREAM_NAME, "namespace", Field.of(FIELD_NAME, JsonSchemaType.STRING))); + final ConfiguredAirbyteStream streamTwo = new ConfiguredAirbyteStream() + .withStream(CatalogHelpers.createAirbyteStream(STREAM_NAME, "namespace2", Field.of(FIELD_NAME, JsonSchemaType.STRING))); + + final List streams = List.of(streamOne, streamTwo); + catalog.withStreams(streams); + + } else { + final ConfiguredAirbyteStream stream = new ConfiguredAirbyteStream() + .withStream(CatalogHelpers.createAirbyteStream(STREAM_NAME, Field.of(FIELD_NAME, JsonSchemaType.STRING))); + catalog.withStreams(Collections.singletonList(stream)); + } final StandardSync standardSync = new StandardSync() .withConnectionId(connectionId) diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/WorkerUtilsTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/WorkerUtilsTest.java index 08cfa1dfe93ed..9beab9225aaa9 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/WorkerUtilsTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/WorkerUtilsTest.java @@ -4,20 +4,26 @@ package io.airbyte.workers; +import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoInteractions; import static org.mockito.Mockito.when; +import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.config.Configs; import io.airbyte.config.EnvConfigs; +import io.airbyte.config.StandardSync; +import io.airbyte.config.StandardSyncInput; import io.airbyte.workers.protocols.airbyte.HeartbeatMonitor; import java.time.Duration; import java.time.temporal.ChronoUnit; +import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; +import org.apache.commons.lang3.tuple.ImmutablePair; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; @@ -116,6 +122,23 @@ void testProcessDies() { } + @Test + void testMapStreamNamesToSchemasWithNullNamespace() { + final ImmutablePair syncPair = TestConfigHelpers.createSyncConfig(); + final StandardSyncInput syncInput = syncPair.getValue(); + final Map mapOutput = WorkerUtils.mapStreamNamesToSchemas(syncInput); + assertNotNull(mapOutput.get("user_preferences")); + } + + @Test + void testMapStreamNamesToSchemasWithMultipleNamespaces() { + final ImmutablePair syncPair = TestConfigHelpers.createSyncConfig(true); + final StandardSyncInput syncInput = syncPair.getValue(); + final Map mapOutput = WorkerUtils.mapStreamNamesToSchemas(syncInput); + assertNotNull(mapOutput.get("namespaceuser_preferences")); + assertNotNull(mapOutput.get("namespace2user_preferences")); + } + /** * As long as 
the heartbeatMonitor detects a heartbeat, the process will be allowed to continue. * This method checks the heartbeat once every minute. Once there is no heartbeat detected, if the diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/protocols/airbyte/AirbyteMessageUtils.java b/airbyte-workers/src/test/java/io/airbyte/workers/protocols/airbyte/AirbyteMessageUtils.java index 192d48f9607e5..27cdc8794c2f4 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/protocols/airbyte/AirbyteMessageUtils.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/protocols/airbyte/AirbyteMessageUtils.java @@ -46,7 +46,13 @@ public static AirbyteMessage createRecordMessage(final String tableName, } public static AirbyteMessage createRecordMessage(final String tableName, - final Map record) { + final String key, + final Integer value) { + return createRecordMessage(tableName, ImmutableMap.of(key, value)); + } + + public static AirbyteMessage createRecordMessage(final String tableName, + final Map record) { return createRecordMessage(tableName, Jsons.jsonNode(record), Instant.EPOCH); } From 05b123fa10715ba30b0a78494e87e8394dae953b Mon Sep 17 00:00:00 2001 From: Augustin Date: Tue, 10 May 2022 08:05:38 +0200 Subject: [PATCH 11/55] =?UTF-8?q?=F0=9F=90=9B=20octavia-cli:=20fix=20sync?= =?UTF-8?q?=5Fcatalog=20in=20generated=20connections=20(#12704)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../connection/expected.yaml | 44 ++---------- .../test_generate/test_renderers.py | 69 +++---------------- octavia-cli/octavia_cli/generate/renderers.py | 7 +- .../test_generate/test_renderers.py | 19 ++++- 4 files changed, 38 insertions(+), 101 deletions(-) diff --git a/octavia-cli/integration_tests/test_generate/expected_rendered_yaml/connection/expected.yaml b/octavia-cli/integration_tests/test_generate/expected_rendered_yaml/connection/expected.yaml index f23d68f79d199..7ff02b0fafcf4 100644 --- a/octavia-cli/integration_tests/test_generate/expected_rendered_yaml/connection/expected.yaml +++ b/octavia-cli/integration_tests/test_generate/expected_rendered_yaml/connection/expected.yaml @@ -21,44 +21,14 @@ configuration: sync_catalog: # OPTIONAL | object | 🚨 ONLY edit streams.config, streams.stream should not be edited as schema cannot be changed.
streams: - config: - aliasName: aliasMock - cursorField: [] - destinationSyncMode: append - primaryKey: [] + alias_name: pokemon + destination_sync_mode: append selected: true - syncMode: full_refresh + sync_mode: full_refresh stream: - defaultCursorField: + default_cursor_field: - foo - jsonSchema: - $schema: http://json-schema.org/draft-07/schema# - properties: - foo: - type: number - name: stream_1 - namespace: null - sourceDefinedCursor: null - sourceDefinedPrimaryKey: [] - supportedSyncModes: + json_schema: {} + name: my_stream + supported_sync_modes: - full_refresh - - config: - aliasName: aliasMock - cursorField: [] - destinationSyncMode: append - primaryKey: [] - selected: true - syncMode: full_refresh - stream: - defaultCursorField: [] - jsonSchema: - $schema: http://json-schema.org/draft-07/schema# - properties: - bar: - type: number - name: stream_2 - namespace: null - sourceDefinedCursor: null - sourceDefinedPrimaryKey: [] - supportedSyncModes: - - full_refresh - - incremental diff --git a/octavia-cli/integration_tests/test_generate/test_renderers.py b/octavia-cli/integration_tests/test_generate/test_renderers.py index 1d3fa96952dd0..2431a439f0a98 100644 --- a/octavia-cli/integration_tests/test_generate/test_renderers.py +++ b/octavia-cli/integration_tests/test_generate/test_renderers.py @@ -7,6 +7,12 @@ import pytest import yaml +from airbyte_api_client.model.airbyte_catalog import AirbyteCatalog +from airbyte_api_client.model.airbyte_stream import AirbyteStream +from airbyte_api_client.model.airbyte_stream_and_configuration import AirbyteStreamAndConfiguration +from airbyte_api_client.model.airbyte_stream_configuration import AirbyteStreamConfiguration +from airbyte_api_client.model.destination_sync_mode import DestinationSyncMode +from airbyte_api_client.model.sync_mode import SyncMode from octavia_cli.generate.renderers import ConnectionRenderer, ConnectorSpecificationRenderer pytestmark = pytest.mark.integration @@ -89,65 +95,12 @@ def test_expected_output_connector_specification_renderer( def test_expected_output_connection_renderer(octavia_tmp_project_directory, mocker): - mock_source = mocker.Mock( - resource_id="my_source_id", - catalog={ - "streams": [ - { - "stream": { - "name": "stream_1", - "jsonSchema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "properties": { - "foo": { - "type": "number", - } - }, - }, - "supportedSyncModes": ["full_refresh"], - "sourceDefinedCursor": None, - "defaultCursorField": ["foo"], - "sourceDefinedPrimaryKey": [], - "namespace": None, - }, - "config": { - "syncMode": "full_refresh", - "cursorField": [], - "destinationSyncMode": "append", - "primaryKey": [], - "aliasName": "aliasMock", - "selected": True, - }, - }, - { - "stream": { - "name": "stream_2", - "jsonSchema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "properties": { - "bar": { - "type": "number", - } - }, - }, - "supportedSyncModes": ["full_refresh", "incremental"], - "sourceDefinedCursor": None, - "defaultCursorField": [], - "sourceDefinedPrimaryKey": [], - "namespace": None, - }, - "config": { - "syncMode": "full_refresh", - "cursorField": [], - "destinationSyncMode": "append", - "primaryKey": [], - "aliasName": "aliasMock", - "selected": True, - }, - }, - ] - }, + stream = AirbyteStream(default_cursor_field=["foo"], json_schema={}, name="my_stream", supported_sync_modes=[SyncMode("full_refresh")]) + config = AirbyteStreamConfiguration( + alias_name="pokemon", selected=True, destination_sync_mode=DestinationSyncMode("append"), 
sync_mode=SyncMode("full_refresh") ) + catalog = AirbyteCatalog([AirbyteStreamAndConfiguration(stream=stream, config=config)]) + mock_source = mocker.Mock(resource_id="my_source_id", catalog=catalog) mock_destination = mocker.Mock(resource_id="my_destination_id") renderer = ConnectionRenderer("my_new_connection", mock_source, mock_destination) diff --git a/octavia-cli/octavia_cli/generate/renderers.py b/octavia-cli/octavia_cli/generate/renderers.py index 0d84550eaf590..cd53713999744 100644 --- a/octavia-cli/octavia_cli/generate/renderers.py +++ b/octavia-cli/octavia_cli/generate/renderers.py @@ -7,6 +7,7 @@ from typing import Any, Callable, List import yaml +from airbyte_api_client.model.airbyte_catalog import AirbyteCatalog from jinja2 import Environment, PackageLoader, Template, select_autoescape from octavia_cli.apply import resources @@ -241,16 +242,16 @@ def __init__(self, connection_name: str, source: resources.Source, destination: self.destination = destination @staticmethod - def catalog_to_yaml(catalog: dict) -> str: + def catalog_to_yaml(catalog: AirbyteCatalog) -> str: """Convert the source catalog to a YAML string. Args: - catalog (dict): Source's catalog. + catalog (AirbyteCatalog): Source's catalog. Returns: str: Catalog rendered as yaml. """ - return yaml.dump(catalog, Dumper=CatalogDumper, default_flow_style=False) + return yaml.dump(catalog.to_dict(), Dumper=CatalogDumper, default_flow_style=False) def _render(self) -> str: yaml_catalog = self.catalog_to_yaml(self.source.catalog) diff --git a/octavia-cli/unit_tests/test_generate/test_renderers.py b/octavia-cli/unit_tests/test_generate/test_renderers.py index 80f1aab0fbeae..315ed823dd208 100644 --- a/octavia-cli/unit_tests/test_generate/test_renderers.py +++ b/octavia-cli/unit_tests/test_generate/test_renderers.py @@ -5,7 +5,14 @@ from unittest.mock import mock_open, patch import pytest -from octavia_cli.generate import renderers +import yaml +from airbyte_api_client.model.airbyte_catalog import AirbyteCatalog +from airbyte_api_client.model.airbyte_stream import AirbyteStream +from airbyte_api_client.model.airbyte_stream_and_configuration import AirbyteStreamAndConfiguration +from airbyte_api_client.model.airbyte_stream_configuration import AirbyteStreamConfiguration +from airbyte_api_client.model.destination_sync_mode import DestinationSyncMode +from airbyte_api_client.model.sync_mode import SyncMode +from octavia_cli.generate import renderers, yaml_dumpers class TestFieldToRender: @@ -268,9 +275,15 @@ def test_init(self, mock_source, mock_destination): assert connection_renderer.destination == mock_destination def test_catalog_to_yaml(self, mocker): - catalog = {"camelCase": "camelCase", "snake_case": "camelCase", "myArray": ["a", "b"]} + stream = AirbyteStream( + default_cursor_field=["foo"], json_schema={}, name="my_stream", supported_sync_modes=[SyncMode("full_refresh")] + ) + config = AirbyteStreamConfiguration( + alias_name="pokemon", selected=True, destination_sync_mode=DestinationSyncMode("append"), sync_mode=SyncMode("full_refresh") + ) + catalog = AirbyteCatalog([AirbyteStreamAndConfiguration(stream=stream, config=config)]) yaml_catalog = renderers.ConnectionRenderer.catalog_to_yaml(catalog) - assert yaml_catalog == "camelCase: camelCase\nmyArray:\n - a\n - b\nsnake_case: camelCase\n" + assert yaml_catalog == yaml.dump(catalog.to_dict(), Dumper=yaml_dumpers.CatalogDumper, default_flow_style=False) def test_write_yaml(self, mocker, mock_source, mock_destination): mocker.patch.object(renderers.ConnectionRenderer, 
"_get_output_path") From 2e25d7dbaeaa0a9a2c430e0d54f30a166a26fb6a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Koci=C5=84ski?= Date: Tue, 10 May 2022 09:43:31 +0200 Subject: [PATCH 12/55] =?UTF-8?q?=F0=9F=90=9B=20=20octavia-cli:=20fix=20ar?= =?UTF-8?q?gument=20parsing=20in=20octavia=20cli.=20(#12691)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- octavia-cli/octavia_cli/base_commands.py | 5 ++- octavia-cli/octavia_cli/telemetry.py | 8 ++-- octavia-cli/unit_tests/test_base_commands.py | 15 +++++-- octavia-cli/unit_tests/test_telemetry.py | 44 +++++++++++--------- 4 files changed, 44 insertions(+), 28 deletions(-) diff --git a/octavia-cli/octavia_cli/base_commands.py b/octavia-cli/octavia_cli/base_commands.py index 99d7487ede6f0..0f072e8ca9ecc 100644 --- a/octavia-cli/octavia_cli/base_commands.py +++ b/octavia-cli/octavia_cli/base_commands.py @@ -28,7 +28,10 @@ def make_context( return super().make_context(info_name, args, parent, **extra) except Exception as e: telemetry_client = parent.obj["TELEMETRY_CLIENT"] - telemetry_client.send_command_telemetry(parent, error=e, extra_info_name=info_name) + if isinstance(e, click.exceptions.Exit) and e.exit_code == 0: # Click raises Exit(0) errors when running --help commands + telemetry_client.send_command_telemetry(parent, extra_info_name=info_name, is_help=True) + else: + telemetry_client.send_command_telemetry(parent, error=e, extra_info_name=info_name) raise e def invoke(self, ctx: click.Context) -> t.Any: diff --git a/octavia-cli/octavia_cli/telemetry.py b/octavia-cli/octavia_cli/telemetry.py index f952fcb83669c..b288b3f5bb39d 100644 --- a/octavia-cli/octavia_cli/telemetry.py +++ b/octavia-cli/octavia_cli/telemetry.py @@ -63,7 +63,9 @@ def _create_command_name(self, ctx: click.Context, command_names: Optional[list] self._create_command_name(ctx.parent, command_names) return " ".join(command_names) if not extra_info_name else " ".join(command_names + [extra_info_name]) - def send_command_telemetry(self, ctx: click.Context, error: Optional[Exception] = None, extra_info_name: Optional[str] = None): + def send_command_telemetry( + self, ctx: click.Context, error: Optional[Exception] = None, extra_info_name: Optional[str] = None, is_help: bool = False + ): """Send telemetry with the analytics client. The event name is the command name. The context has the octavia version. 
@@ -76,15 +78,15 @@ def send_command_telemetry(self, ctx: click.Context, error: Optional[Exception] """ user_id = ctx.obj.get("WORKSPACE_ID") if ctx.obj.get("ANONYMOUS_DATA_COLLECTION", True) is False else None anonymous_id = None if user_id else str(uuid.uuid1()) - segment_context = {"app": {"name": "octavia-cli", "version": ctx.obj.get("OCTAVIA_VERSION")}} segment_properties = { "success": error is None, + "is_help": is_help, "error_type": error.__class__.__name__ if error is not None else None, "project_is_initialized": ctx.obj.get("PROJECT_IS_INITIALIZED"), "airbyter": os.getenv("AIRBYTE_ROLE") == "airbyter", } - command_name = self._create_command_name(ctx, extra_info_name) + command_name = self._create_command_name(ctx, extra_info_name=extra_info_name) self.segment_client.track( user_id=user_id, anonymous_id=anonymous_id, event=command_name, properties=segment_properties, context=segment_context ) diff --git a/octavia-cli/unit_tests/test_base_commands.py b/octavia-cli/unit_tests/test_base_commands.py index 4112159bc1bbc..967bd41ea7db2 100644 --- a/octavia-cli/unit_tests/test_base_commands.py +++ b/octavia-cli/unit_tests/test_base_commands.py @@ -22,14 +22,21 @@ def test_make_context(self, mocker, octavia_command): parent_make_context.assert_called_with("my_info_name", ["arg1", "arg2"], mock_parent_ctx, foo="foo", bar="bar") assert made_context == parent_make_context.return_value - def test_make_context_error(self, mocker, octavia_command, mock_telemetry_client): + @pytest.mark.parametrize("error", [Exception(), click.exceptions.Exit(0), click.exceptions.Exit(1)]) + def test_make_context_error(self, mocker, octavia_command, mock_telemetry_client, error): mock_parent_ctx = mocker.Mock(obj={"TELEMETRY_CLIENT": mock_telemetry_client}) - error = Exception() parent_make_context = mocker.Mock(side_effect=error) mocker.patch.object(click.Command, "make_context", parent_make_context) - with pytest.raises(Exception): + with pytest.raises(type(error)): octavia_command.make_context("my_info_name", ["arg1", "arg2"], parent=mock_parent_ctx, foo="foo", bar="bar") - mock_telemetry_client.send_command_telemetry.assert_called_with(mock_parent_ctx, error=error, extra_info_name="my_info_name") + if isinstance(error, click.exceptions.Exit) and error.exit_code == 0: + mock_telemetry_client.send_command_telemetry.assert_called_with( + mock_parent_ctx, extra_info_name="my_info_name", is_help=True + ) + else: + mock_telemetry_client.send_command_telemetry.assert_called_with( + mock_parent_ctx, error=error, extra_info_name="my_info_name" + ) def test_invoke(self, mocker, octavia_command, mock_telemetry_client): mock_ctx = mocker.Mock(obj={"TELEMETRY_CLIENT": mock_telemetry_client}) diff --git a/octavia-cli/unit_tests/test_telemetry.py b/octavia-cli/unit_tests/test_telemetry.py index 3fc5446b696bd..81e630970c520 100644 --- a/octavia-cli/unit_tests/test_telemetry.py +++ b/octavia-cli/unit_tests/test_telemetry.py @@ -54,25 +54,26 @@ def test__create_command_name_single_context(self, mocker, telemetry_client, ext assert command_name == "child_command" @pytest.mark.parametrize( - "workspace_id, anonymous_data_collection, airbyte_role, project_is_initialized, octavia_version, error, expected_success, expected_error_type", + "workspace_id, anonymous_data_collection, airbyte_role, project_is_initialized, octavia_version, error, expected_success, expected_error_type, is_help", [ - (None, None, None, None, None, None, True, None), - (None, None, None, None, None, Exception(), False, "Exception"), - (None, None, None, 
None, None, AttributeError(), False, "AttributeError"), - (None, True, None, None, None, None, True, None), - (None, True, None, None, None, Exception(), False, "Exception"), - (None, True, None, None, None, AttributeError(), False, "AttributeError"), - ("my_workspace_id", False, None, None, None, None, True, None), - ("my_workspace_id", False, None, None, None, Exception(), False, "Exception"), - ("my_workspace_id", True, None, None, None, None, True, None), - ("my_workspace_id", True, None, None, None, Exception(), False, "Exception"), - ("my_workspace_id", True, "airbyter", None, None, None, True, None), - ("my_workspace_id", True, "non_airbyter", None, None, Exception(), False, "Exception"), - ("my_workspace_id", True, "airbyter", True, None, None, True, None), - ("my_workspace_id", True, "non_airbyter", False, None, Exception(), False, "Exception"), - ("my_workspace_id", True, "airbyter", True, None, None, True, None), - ("my_workspace_id", True, "non_airbyter", False, "0.1.0", Exception(), False, "Exception"), - ("my_workspace_id", True, "non_airbyter", False, "0.1.0", None, True, None), + (None, None, None, None, None, None, True, None, False), + (None, None, None, None, None, Exception(), False, "Exception", False), + (None, None, None, None, None, AttributeError(), False, "AttributeError", False), + (None, True, None, None, None, None, True, None, False), + (None, True, None, None, None, Exception(), False, "Exception", False), + (None, True, None, None, None, AttributeError(), False, "AttributeError", False), + ("my_workspace_id", False, None, None, None, None, True, None, False), + ("my_workspace_id", False, None, None, None, Exception(), False, "Exception", False), + ("my_workspace_id", True, None, None, None, None, True, None, False), + ("my_workspace_id", True, None, None, None, Exception(), False, "Exception", False), + ("my_workspace_id", True, "airbyter", None, None, None, True, None, False), + ("my_workspace_id", True, "non_airbyter", None, None, Exception(), False, "Exception", False), + ("my_workspace_id", True, "airbyter", True, None, None, True, None, False), + ("my_workspace_id", True, "non_airbyter", False, None, Exception(), False, "Exception", False), + ("my_workspace_id", True, "airbyter", True, None, None, True, None, False), + ("my_workspace_id", True, "non_airbyter", False, "0.1.0", Exception(), False, "Exception", False), + ("my_workspace_id", True, "non_airbyter", False, "0.1.0", None, True, None, False), + ("my_workspace_id", True, "non_airbyter", False, "0.1.0", None, True, None, True), ], ) def test_send_command_telemetry( @@ -87,6 +88,7 @@ def test_send_command_telemetry( error, expected_success, expected_error_type, + is_help, ): extra_info_name = "foo" mocker.patch.object(telemetry.os, "getenv", mocker.Mock(return_value=airbyte_role)) @@ -104,14 +106,16 @@ def test_send_command_telemetry( expected_segment_context = {"app": {"name": "octavia-cli", "version": octavia_version}} expected_properties = { "success": expected_success, + "is_help": is_help, "error_type": expected_error_type, "project_is_initialized": project_is_initialized, "airbyter": airbyte_role == "airbyter", } telemetry_client.segment_client = mocker.Mock() telemetry_client._create_command_name = mocker.Mock(return_value="my_command") - telemetry_client.send_command_telemetry(mock_ctx, error=error, extra_info_name=extra_info_name) - telemetry_client._create_command_name.assert_called_with(mock_ctx, extra_info_name) + + telemetry_client.send_command_telemetry(mock_ctx, error=error, 
extra_info_name=extra_info_name, is_help=is_help) + telemetry_client._create_command_name.assert_called_with(mock_ctx, extra_info_name=extra_info_name) telemetry_client.segment_client.track.assert_called_with( user_id=expected_user_id, anonymous_id=expected_anonymous_id, From 0b2eb7a8fc2f4ebe58a3ab64d2bafb54f85d4c94 Mon Sep 17 00:00:00 2001 From: Baz Date: Tue, 10 May 2022 10:56:28 +0300 Subject: [PATCH 13/55] Source Amplitude: fix the doc to match the template (#12698) --- docs/integrations/sources/amplitude.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/integrations/sources/amplitude.md b/docs/integrations/sources/amplitude.md index e152838e752a9..6334de4ead0d7 100644 --- a/docs/integrations/sources/amplitude.md +++ b/docs/integrations/sources/amplitude.md @@ -9,18 +9,17 @@ Before you begin replicating the data from `Amplitude`, please follow this guide Once you have your credentials, you can now use them to set up the connection in Airbyte. ## Setup guide -### Requirements -* Amplitude API Key -* Amplitude Secret Key -* Start Date +### Step 1: Set up Amplitude source +You will need to obtain your Amplitude `API Key` and `Secret Key` using this [guide](https://help.amplitude.com/hc/en-us/articles/360058073772-Create-and-manage-organizations-and-projects#view-and-edit-your-project-information) to set up the connector in Airbyte. -Please read [How to get your API key and Secret key](https://help.amplitude.com/hc/en-us/articles/360058073772-Create-and-manage-organizations-and-projects#view-and-edit-your-project-information) before you begin setup the Airbyte connection. +### Step 2: Set up Amplitude source connector in Airbyte ### For OSS Airbyte: 1. In the left navigation bar, click **Sources**. In the top-right corner, click **+ new source**. 2. On the Set up the `source` page, enter the name for the `Amplitude` connector and select **Amplitude** from the Source type dropdown. 3. Enter your `API Key` and `Secret Key` in the corresponding fields 4. Enter the `Start Date` as the starting point for your data replication. +5. Click on `Check Connection` to finish configuring the Amplitude source. ### For Airbyte Cloud: @@ -29,6 +28,7 @@ Please read [How to get your API key and Secret key](https://help.amplitude.com/ 3. On the Set up the `source` page, enter the name for the `Amplitude` connector and select **Amplitude** from the Source type dropdown. 4. Enter your `API Key` and `Secret Key` in the corresponding fields 5. Enter the `Start Date` as the starting point for your data replication. +6. Click on `Check Connection` to finish configuring the Amplitude source.
## Supported Streams From 7c4bebff1897caccbdcdbf0bd1e940fb411ebb6a Mon Sep 17 00:00:00 2001 From: Octavia Squidington III <90398440+octavia-squidington-iii@users.noreply.github.com> Date: Tue, 10 May 2022 05:39:46 -0300 Subject: [PATCH 14/55] Bump Airbyte version from 0.38.0-alpha to 0.38.1-alpha (#12724) Co-authored-by: alafanechere --- .bumpversion.cfg | 2 +- .env | 2 +- airbyte-bootloader/Dockerfile | 2 +- airbyte-container-orchestrator/Dockerfile | 2 +- airbyte-metrics/reporter/Dockerfile | 2 +- airbyte-scheduler/app/Dockerfile | 2 +- airbyte-server/Dockerfile | 2 +- airbyte-webapp/package-lock.json | 4 ++-- airbyte-webapp/package.json | 2 +- airbyte-workers/Dockerfile | 2 +- charts/airbyte/Chart.yaml | 2 +- charts/airbyte/README.md | 10 +++++----- charts/airbyte/values.yaml | 10 +++++----- docs/operator-guides/upgrading-airbyte.md | 2 +- kube/overlays/stable-with-resource-limits/.env | 2 +- .../stable-with-resource-limits/kustomization.yaml | 12 ++++++------ kube/overlays/stable/.env | 2 +- kube/overlays/stable/kustomization.yaml | 12 ++++++------ octavia-cli/Dockerfile | 2 +- octavia-cli/README.md | 2 +- octavia-cli/install.sh | 2 +- octavia-cli/setup.py | 2 +- 22 files changed, 41 insertions(+), 41 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index e27cc15fb054f..6d106cc3f50f7 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.38.0-alpha +current_version = 0.38.1-alpha commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? diff --git a/.env b/.env index cd01404075e79..311f60ef10a9d 100644 --- a/.env +++ b/.env @@ -10,7 +10,7 @@ ### SHARED ### -VERSION=0.38.0-alpha +VERSION=0.38.1-alpha # When using the airbyte-db via default docker image CONFIG_ROOT=/data diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index 11b728bb68288..f272323d3ff8f 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim -ARG VERSION=0.38.0-alpha +ARG VERSION=0.38.1-alpha ENV APPLICATION airbyte-bootloader ENV VERSION ${VERSION} diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index bdd14adc9e4a2..22ad887ca064e 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -26,7 +26,7 @@ RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] htt RUN apt-get update && apt-get install -y kubectl # Don't change this manually. 
Bump version expects to make moves based on this string -ARG VERSION=0.38.0-alpha +ARG VERSION=0.38.1-alpha ENV APPLICATION airbyte-container-orchestrator ENV VERSION=${VERSION} diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index 5d3e153a9fe84..99c710f56792d 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS metrics-reporter -ARG VERSION=0.38.0-alpha +ARG VERSION=0.38.1-alpha ENV APPLICATION airbyte-metrics-reporter ENV VERSION ${VERSION} diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile index 6acd8cdba6b95..e5dcb4bd5e622 100644 --- a/airbyte-scheduler/app/Dockerfile +++ b/airbyte-scheduler/app/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS scheduler -ARG VERSION=0.38.0-alpha +ARG VERSION=0.38.1-alpha ENV APPLICATION airbyte-scheduler ENV VERSION ${VERSION} diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index 3d7b0ad520f3d..f4f4fb3e3e33b 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -3,7 +3,7 @@ FROM openjdk:${JDK_VERSION}-slim AS server EXPOSE 8000 -ARG VERSION=0.38.0-alpha +ARG VERSION=0.38.1-alpha ENV APPLICATION airbyte-server ENV VERSION ${VERSION} diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index 9d22c4fb39344..74f4ec270e530 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.38.0-alpha", + "version": "0.38.1-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.38.0-alpha", + "version": "0.38.1-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^6.1.1", "@fortawesome/free-brands-svg-icons": "^6.1.1", diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index f868275c6f13e..cce629bc10185 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.38.0-alpha", + "version": "0.38.1-alpha", "private": true, "engines": { "node": ">=16.0.0" diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile index 0925e82d50e4a..12d6386770837 100644 --- a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -25,7 +25,7 @@ RUN curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packa RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list RUN apt-get update && apt-get install -y kubectl -ARG VERSION=0.38.0-alpha +ARG VERSION=0.38.1-alpha ENV APPLICATION airbyte-workers ENV VERSION ${VERSION} diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index fab282b1d580b..6f2e30de853de 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -21,7 +21,7 @@ version: 0.3.2 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. 
-appVersion: "0.38.0-alpha" +appVersion: "0.38.1-alpha" dependencies: - name: common diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index 9e18842a84c6e..709ac8068d334 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -30,7 +30,7 @@ Helm charts for Airbyte. | `webapp.replicaCount` | Number of webapp replicas | `1` | | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` | | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` | -| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.38.0-alpha` | +| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.38.1-alpha` | | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` | | `webapp.containerSecurityContext` | Security context for the container | `{}` | | `webapp.livenessProbe.enabled` | Enable livenessProbe on the webapp | `true` | @@ -72,7 +72,7 @@ Helm charts for Airbyte. | `scheduler.replicaCount` | Number of scheduler replicas | `1` | | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` | | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | -| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.38.0-alpha` | +| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.38.1-alpha` | | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | | `scheduler.containerSecurityContext` | Security context for the container | `{}` | | `scheduler.livenessProbe.enabled` | Enable livenessProbe on the scheduler | `true` | @@ -135,7 +135,7 @@ Helm charts for Airbyte. | `server.replicaCount` | Number of server replicas | `1` | | `server.image.repository` | The repository to use for the airbyte server image. | `airbyte/server` | | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` | -| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.38.0-alpha` | +| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.38.1-alpha` | | `server.podAnnotations` | Add extra annotations to the server pod | `{}` | | `server.containerSecurityContext` | Security context for the container | `{}` | | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` | @@ -170,7 +170,7 @@ Helm charts for Airbyte. | `worker.replicaCount` | Number of worker replicas | `1` | | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` | | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` | -| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.38.0-alpha` | +| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.38.1-alpha` | | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` | | `worker.containerSecurityContext` | Security context for the container | `{}` | | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` | @@ -202,7 +202,7 @@ Helm charts for Airbyte. 
| ----------------------------- | -------------------------------------------------------------------- | -------------------- | | `bootloader.image.repository` | The repository to use for the airbyte bootloader image. | `airbyte/bootloader` | | `bootloader.image.pullPolicy` | the pull policy to use for the airbyte bootloader image | `IfNotPresent` | -| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.38.0-alpha` | +| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.38.1-alpha` | | `bootloader.podAnnotations` | Add extra annotations to the bootloader pod | `{}` | | `bootloader.nodeSelector` | Node labels for pod assignment | `{}` | | `bootloader.tolerations` | Tolerations for worker pod assignment. | `[]` | diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index 21cbd5baaf892..5ded7bfc76cf7 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -41,7 +41,7 @@ webapp: image: repository: airbyte/webapp pullPolicy: IfNotPresent - tag: 0.38.0-alpha + tag: 0.38.1-alpha ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s) ## @@ -207,7 +207,7 @@ scheduler: image: repository: airbyte/scheduler pullPolicy: IfNotPresent - tag: 0.38.0-alpha + tag: 0.38.1-alpha ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod ## @@ -438,7 +438,7 @@ server: image: repository: airbyte/server pullPolicy: IfNotPresent - tag: 0.38.0-alpha + tag: 0.38.1-alpha ## @param server.podAnnotations [object] Add extra annotations to the server pod ## @@ -565,7 +565,7 @@ worker: image: repository: airbyte/worker pullPolicy: IfNotPresent - tag: 0.38.0-alpha + tag: 0.38.1-alpha ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s) ## @@ -683,7 +683,7 @@ bootloader: image: repository: airbyte/bootloader pullPolicy: IfNotPresent - tag: 0.38.0-alpha + tag: 0.38.1-alpha ## @param bootloader.podAnnotations [object] Add extra annotations to the bootloader pod ## diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md index 081d2f20afcb5..9371aef650a9c 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/operator-guides/upgrading-airbyte.md @@ -103,7 +103,7 @@ If you are upgrading from (i.e. your current version of Airbyte is) Airbyte vers Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`. 
```bash - docker run --rm -v /tmp:/config airbyte/migration:0.38.0-alpha --\ + docker run --rm -v /tmp:/config airbyte/migration:0.38.1-alpha --\ --input /config/airbyte_archive.tar.gz\ --output /config/airbyte_archive_migrated.tar.gz ``` diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env index 9d3d78ea61dbf..b9c1a6e7ab7e5 100644 --- a/kube/overlays/stable-with-resource-limits/.env +++ b/kube/overlays/stable-with-resource-limits/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.38.0-alpha +AIRBYTE_VERSION=0.38.1-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml index 71ad4ddec69b6..7fb89e975407b 100644 --- a/kube/overlays/stable-with-resource-limits/kustomization.yaml +++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.38.0-alpha + newTag: 0.38.1-alpha - name: airbyte/bootloader - newTag: 0.38.0-alpha + newTag: 0.38.1-alpha - name: airbyte/scheduler - newTag: 0.38.0-alpha + newTag: 0.38.1-alpha - name: airbyte/server - newTag: 0.38.0-alpha + newTag: 0.38.1-alpha - name: airbyte/webapp - newTag: 0.38.0-alpha + newTag: 0.38.1-alpha - name: airbyte/worker - newTag: 0.38.0-alpha + newTag: 0.38.1-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env index 4ac1e1f2282cf..e277a9be5bee9 100644 --- a/kube/overlays/stable/.env +++ b/kube/overlays/stable/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.38.0-alpha +AIRBYTE_VERSION=0.38.1-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml index b7d826d7927f4..cb4cb510b7d48 100644 --- a/kube/overlays/stable/kustomization.yaml +++ b/kube/overlays/stable/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.38.0-alpha + newTag: 0.38.1-alpha - name: airbyte/bootloader - newTag: 0.38.0-alpha + newTag: 0.38.1-alpha - name: airbyte/scheduler - newTag: 0.38.0-alpha + newTag: 0.38.1-alpha - name: airbyte/server - newTag: 0.38.0-alpha + newTag: 0.38.1-alpha - name: airbyte/webapp - newTag: 0.38.0-alpha + newTag: 0.38.1-alpha - name: airbyte/worker - newTag: 0.38.0-alpha + newTag: 0.38.1-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/octavia-cli/Dockerfile b/octavia-cli/Dockerfile index d94558cafb812..d025d9304e725 100644 --- a/octavia-cli/Dockerfile +++ b/octavia-cli/Dockerfile @@ -14,5 +14,5 @@ USER octavia-cli WORKDIR /home/octavia-project ENTRYPOINT ["octavia"] -LABEL io.airbyte.version=0.38.0-alpha +LABEL io.airbyte.version=0.38.1-alpha LABEL io.airbyte.name=airbyte/octavia-cli diff --git a/octavia-cli/README.md b/octavia-cli/README.md index 5431288886d59..74b9a04d8b530 100644 --- a/octavia-cli/README.md +++ b/octavia-cli/README.md @@ -105,7 +105,7 @@ This script: ```bash touch ~/.octavia # Create a file to store env variables that will be mapped the octavia-cli container mkdir my_octavia_project_directory # Create your octavia project directory where YAML configurations will be stored. 
-docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.38.0-alpha +docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.38.1-alpha ``` ### Using `docker-compose` diff --git a/octavia-cli/install.sh b/octavia-cli/install.sh index 3ade2bcef14f4..5f94727505cf6 100755 --- a/octavia-cli/install.sh +++ b/octavia-cli/install.sh @@ -3,7 +3,7 @@ # This install scripts currently only works for ZSH and Bash profiles. # It creates an octavia alias in your profile bound to a docker run command and your current user. -VERSION=0.38.0-alpha +VERSION=0.38.1-alpha OCTAVIA_ENV_FILE=${HOME}/.octavia detect_profile() { diff --git a/octavia-cli/setup.py b/octavia-cli/setup.py index ebeafe6e3b0f8..a8fab03974232 100644 --- a/octavia-cli/setup.py +++ b/octavia-cli/setup.py @@ -15,7 +15,7 @@ setup( name="octavia-cli", - version="0.38.0", + version="0.38.1", description="A command line interface to manage Airbyte configurations", long_description=README, author="Airbyte", From 230dc3af6842ef1b1fd27aee9c0b0622aab54902 Mon Sep 17 00:00:00 2001 From: Krishna Glick Date: Tue, 10 May 2022 11:23:19 -0700 Subject: [PATCH 15/55] Adding VS Code settings to project workspace (#12319) * First pass * Code Review changes * Adding a code workspace * Adding extension recommendations --- .gitignore | 2 -- .vscode/frontend.code-workspace | 54 +++++++++++++++++++++++++++++++++ .vscode/settings.json | 36 ++++++++++++++++++++++ 3 files changed, 90 insertions(+), 2 deletions(-) create mode 100644 .vscode/frontend.code-workspace create mode 100644 .vscode/settings.json diff --git a/.gitignore b/.gitignore index 9737f1231509e..17b544c1d7e26 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,5 @@ .gradle .idea -.vscode *.iml *.swp build @@ -13,7 +12,6 @@ data .settings **/gmon.out static_checker_reports/ -.vscode # Logs acceptance_tests_logs/ diff --git a/.vscode/frontend.code-workspace b/.vscode/frontend.code-workspace new file mode 100644 index 0000000000000..8ec2e7d144b29 --- /dev/null +++ b/.vscode/frontend.code-workspace @@ -0,0 +1,54 @@ +{ + "folders": [ + { + "path": "../airbyte-webapp" + }, + { + "path": "../airbyte-webapp-e2e-tests" + } + ], + "extensions": { + "recommendations": [ + "dbaeumer.vscode-eslint", + "esbenp.prettier-vscode", + "ms-vsliveshare.vsliveshare", + "eamodio.gitlens" + ] + }, + "settings": { + "javascript.preferences.quoteStyle": "double", + "typescript.preferences.quoteStyle": "double", + "javascript.preferences.importModuleSpecifier": "shortest", + "typescript.preferences.importModuleSpecifier": "shortest", + "javascript.updateImportsOnFileMove.enabled": "always", + "typescript.updateImportsOnFileMove.enabled": "always", + "editor.detectIndentation": true, + "eslint.format.enable": true, + "eslint.run": "onType", + "[javascript]": { + "editor.formatOnSave": true, + "editor.defaultFormatter": "dbaeumer.vscode-eslint", + "editor.codeActionsOnSave": { + "source.organizeImports": false + } + }, + "[typescript]": { + "editor.formatOnSave": true, + "editor.defaultFormatter": "dbaeumer.vscode-eslint", + "editor.codeActionsOnSave": { + "source.organizeImports": false + } + }, + "[typescriptreact]": { + "editor.formatOnSave": true, + "editor.defaultFormatter": "dbaeumer.vscode-eslint", + "editor.codeActionsOnSave": { + "source.organizeImports": false + } + }, + "[json]": { + 
"editor.formatOnSave": true, + "editor.defaultFormatter": "esbenp.prettier-vscode" + } + } +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000000000..f033cb881e892 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,36 @@ +{ + "javascript.preferences.quoteStyle": "double", + "typescript.preferences.quoteStyle": "double", + "javascript.preferences.importModuleSpecifier": "shortest", + "typescript.preferences.importModuleSpecifier": "shortest", + "javascript.updateImportsOnFileMove.enabled": "always", + "typescript.updateImportsOnFileMove.enabled": "always", + "editor.detectIndentation": true, + "eslint.format.enable": true, + "eslint.run": "onType", + "[javascript]": { + "editor.formatOnSave": true, + "editor.defaultFormatter": "dbaeumer.vscode-eslint", + "editor.codeActionsOnSave": { + "source.organizeImports": false + } + }, + "[typescript]": { + "editor.formatOnSave": true, + "editor.defaultFormatter": "dbaeumer.vscode-eslint", + "editor.codeActionsOnSave": { + "source.organizeImports": false + } + }, + "[typescriptreact]": { + "editor.formatOnSave": true, + "editor.defaultFormatter": "dbaeumer.vscode-eslint", + "editor.codeActionsOnSave": { + "source.organizeImports": false + } + }, + "[json]": { + "editor.formatOnSave": true, + "editor.defaultFormatter": "esbenp.prettier-vscode" + } +} From b0f06e062217cfc6bc3b04b7adfd1c6f9f09f784 Mon Sep 17 00:00:00 2001 From: Vladimir Date: Tue, 10 May 2022 22:54:48 +0300 Subject: [PATCH 16/55] Fix cancel button when it doesn't provide feedback to the user + UX improvements (#12569) * set a loading state for 'Cancel' button if user clicked * extend ToolTip component to support 'not-allowed' cursor * disable 'Reset data' and 'Sync now' buttons if sync is in 'pending' or 'running' status * set loading state and disable buttons in ResetDataModal if reset process is launched * extend ToolTip component - shoe desired cursor type when tooltip is active * refactored: control tooltips showing this props * replace functions call in jsx with components * add style for loading 'danger' button * minor improvements - use ternary operator conditionally setting the desired text * replace using ResetData modal with default text with useConfirmationModalService * replace ResetDataModal (changed column) with useConfirmationModalService * set loading state to 'false' if async action has failed * replace ResetDataModal (refresh schema) with useConfirmationModalService * extend ConfirmationModal - to support optional cancelButtonText prop * fix '../../..' 
import * remove obsolete ResetDataModal component * extend useLoadingState component - to show an error notification if an async action fails * show notification message on top * updated tests * turn on eslint 'curly' rule and fix code style --- airbyte-webapp/.eslintrc | 1 + .../ConfirmationModal/ConfirmationModal.tsx | 40 +++++---- .../src/components/EntityTable/utils.tsx | 4 +- .../JobItem/components/MainInfo.tsx | 20 +++-- .../ResetDataModal/ResetDataModal.tsx | 82 ------------------- .../src/components/ResetDataModal/index.tsx | 3 - .../src/components/ResetDataModal/types.ts | 5 -- .../SingletonCard/SingletonCard.tsx | 1 + .../src/components/ToolTip/ToolTip.tsx | 8 +- .../components/base/Button/LoadingButton.tsx | 12 ++- .../ConfirmationModalService.tsx | 1 + airbyte-webapp/src/hooks/useLoadingState.tsx | 42 +++++++--- airbyte-webapp/src/locales/en.json | 3 + .../src/packages/firebaseReact/sdk.tsx | 8 +- .../components/ReplicationView.tsx | 28 +++---- .../components/StatusView.tsx | 82 ++++++++++++------- airbyte-webapp/src/theme.ts | 1 + .../ConnectionForm/ConnectionForm.test.tsx | 28 +++++-- .../ConnectionForm/ConnectionForm.tsx | 37 +++++---- 19 files changed, 204 insertions(+), 202 deletions(-) delete mode 100644 airbyte-webapp/src/components/ResetDataModal/ResetDataModal.tsx delete mode 100644 airbyte-webapp/src/components/ResetDataModal/index.tsx delete mode 100644 airbyte-webapp/src/components/ResetDataModal/types.ts diff --git a/airbyte-webapp/.eslintrc b/airbyte-webapp/.eslintrc index 38c7b05515be3..16931adccbcc0 100644 --- a/airbyte-webapp/.eslintrc +++ b/airbyte-webapp/.eslintrc @@ -15,6 +15,7 @@ } }, "rules": { + "curly": "error", "prettier/prettier": "error", "unused-imports/no-unused-imports": "error", "import/order": [ diff --git a/airbyte-webapp/src/components/ConfirmationModal/ConfirmationModal.tsx b/airbyte-webapp/src/components/ConfirmationModal/ConfirmationModal.tsx index 5bc3700889608..ab48e88fc16c6 100644 --- a/airbyte-webapp/src/components/ConfirmationModal/ConfirmationModal.tsx +++ b/airbyte-webapp/src/components/ConfirmationModal/ConfirmationModal.tsx @@ -2,9 +2,12 @@ import React from "react"; import { FormattedMessage } from "react-intl"; import styled from "styled-components"; +import { LoadingButton } from "components"; import { Button } from "components/base/Button"; import Modal from "components/Modal"; +import useLoadingState from "../../hooks/useLoadingState"; + const Content = styled.div` width: 585px; font-size: 14px; @@ -30,6 +33,7 @@ export interface ConfirmationModalProps { submitButtonText: string; onSubmit: () => void; submitButtonDataId?: string; + cancelButtonText?: string; } export const ConfirmationModal: React.FC = ({ @@ -39,18 +43,24 @@ export const ConfirmationModal: React.FC = ({ onSubmit, submitButtonText, submitButtonDataId, -}) => ( - }> - - - - - - - - - - -); + cancelButtonText, +}) => { + const { isLoading, startAction } = useLoadingState(); + const onSubmitBtnClick = () => startAction({ action: () => onSubmit() }); + + return ( + }> + + + + + + + + + + + + + ); +}; diff --git a/airbyte-webapp/src/components/EntityTable/utils.tsx b/airbyte-webapp/src/components/EntityTable/utils.tsx index 52f42fe829e42..c704377248a90 100644 --- a/airbyte-webapp/src/components/EntityTable/utils.tsx +++ b/airbyte-webapp/src/components/EntityTable/utils.tsx @@ -110,7 +110,9 @@ export const getConnectionSyncStatus = ( status: ConnectionStatus, lastSyncJobStatus: Status | null ): ConnectionSyncStatus =>
{ - if (status === ConnectionStatus.INACTIVE) return ConnectionSyncStatus.INACTIVE; + if (status === ConnectionStatus.INACTIVE) { + return ConnectionSyncStatus.INACTIVE; + } switch (lastSyncJobStatus) { case Status.SUCCEEDED: diff --git a/airbyte-webapp/src/components/JobItem/components/MainInfo.tsx b/airbyte-webapp/src/components/JobItem/components/MainInfo.tsx index d811dd1bb8977..e47df1701c630 100644 --- a/airbyte-webapp/src/components/JobItem/components/MainInfo.tsx +++ b/airbyte-webapp/src/components/JobItem/components/MainInfo.tsx @@ -4,12 +4,13 @@ import React from "react"; import { FormattedDateParts, FormattedMessage, FormattedTimeParts } from "react-intl"; import styled from "styled-components"; -import { Button, StatusIcon } from "components"; +import { LoadingButton, StatusIcon } from "components"; import { Cell, Row } from "components/SimpleTableComponents"; import { Attempt, JobInfo, JobMeta as JobApiItem } from "core/domain/job/Job"; import Status from "core/statuses"; +import useLoadingState from "../../../hooks/useLoadingState"; import { useCancelJob } from "../../../services/job/JobService"; import AttemptDetails from "./AttemptDetails"; @@ -41,7 +42,7 @@ const AttemptCount = styled.div` color: ${({ theme }) => theme.dangerColor}; `; -const CancelButton = styled(Button)` +const CancelButton = styled(LoadingButton)` margin-right: 10px; padding: 3px 7px; z-index: 1; @@ -97,11 +98,12 @@ const MainInfo: React.FC = ({ shortInfo, isPartialSuccess, }) => { + const { isLoading, showFeedback, startAction } = useLoadingState(); const cancelJob = useCancelJob(); - const onCancelJob = async (event: React.SyntheticEvent) => { + const onCancelJob = (event: React.SyntheticEvent) => { event.stopPropagation(); - return cancelJob(job.id); + return startAction({ action: () => cancelJob(job.id) }); }; const isNotCompleted = job.status && [Status.PENDING, Status.RUNNING, Status.INCOMPLETE].includes(job.status); @@ -142,8 +144,14 @@ const MainInfo: React.FC = ({ {!shortInfo && isNotCompleted && ( - - + + )} diff --git a/airbyte-webapp/src/components/ResetDataModal/ResetDataModal.tsx b/airbyte-webapp/src/components/ResetDataModal/ResetDataModal.tsx deleted file mode 100644 index 1431e7ef27999..0000000000000 --- a/airbyte-webapp/src/components/ResetDataModal/ResetDataModal.tsx +++ /dev/null @@ -1,82 +0,0 @@ -import React from "react"; -import { FormattedMessage } from "react-intl"; -import styled from "styled-components"; - -import { Button } from "components"; -import Modal from "components/Modal"; - -import { ModalTypes } from "./types"; - -export type IProps = { - onClose: () => void; - onSubmit: (data: unknown) => void; - modalType?: ModalTypes; -}; - -const Content = styled.div` - padding: 18px 37px 28px; - font-size: 14px; - line-height: 28px; - max-width: 585px; -`; -const ButtonContent = styled.div` - padding-top: 27px; - text-align: right; -`; -const ButtonWithMargin = styled(Button)` - margin-right: 9px; -`; - -const ResetDataModal: React.FC = ({ onClose, onSubmit, modalType }) => { - const modalText = () => { - if (modalType === ModalTypes.RESET_CHANGED_COLUMN) { - return ; - } - - if (modalType === ModalTypes.UPDATE_SCHEMA) { - return ; - } - - return ; - }; - - const modalTitle = () => { - if (modalType === ModalTypes.UPDATE_SCHEMA) { - return ; - } - - return ; - }; - - const modalCancelButtonText = () => { - if (modalType === ModalTypes.UPDATE_SCHEMA) { - return ; - } - - return ; - }; - - const modalSubmitButtonText = () => { - if (modalType === ModalTypes.UPDATE_SCHEMA) { - 
return ; - } - - return ; - }; - - return ( - - - {modalText()} - - - {modalCancelButtonText()} - - - - - - ); -}; - -export default ResetDataModal; diff --git a/airbyte-webapp/src/components/ResetDataModal/index.tsx b/airbyte-webapp/src/components/ResetDataModal/index.tsx deleted file mode 100644 index befa2b2296dc1..0000000000000 --- a/airbyte-webapp/src/components/ResetDataModal/index.tsx +++ /dev/null @@ -1,3 +0,0 @@ -import ResetDataModal from "./ResetDataModal"; - -export default ResetDataModal; diff --git a/airbyte-webapp/src/components/ResetDataModal/types.ts b/airbyte-webapp/src/components/ResetDataModal/types.ts deleted file mode 100644 index 3311353a055fe..0000000000000 --- a/airbyte-webapp/src/components/ResetDataModal/types.ts +++ /dev/null @@ -1,5 +0,0 @@ -export enum ModalTypes { - RESET_DATA = "ResetData", - RESET_CHANGED_COLUMN = "ResetChangedColumn", - UPDATE_SCHEMA = "UpdateSchema", -} diff --git a/airbyte-webapp/src/components/SingletonCard/SingletonCard.tsx b/airbyte-webapp/src/components/SingletonCard/SingletonCard.tsx index 227ce2ff73648..3fd312ce99a14 100644 --- a/airbyte-webapp/src/components/SingletonCard/SingletonCard.tsx +++ b/airbyte-webapp/src/components/SingletonCard/SingletonCard.tsx @@ -30,6 +30,7 @@ const Singleton = styled.div<{ hasError?: boolean }>` bottom: 49px; left: 50%; transform: translate(-50%, 0); + z-index: 20; padding: 25px 25px 22px; diff --git a/airbyte-webapp/src/components/ToolTip/ToolTip.tsx b/airbyte-webapp/src/components/ToolTip/ToolTip.tsx index 247dd8f25df8c..98ba67b9a2ec1 100644 --- a/airbyte-webapp/src/components/ToolTip/ToolTip.tsx +++ b/airbyte-webapp/src/components/ToolTip/ToolTip.tsx @@ -5,13 +5,13 @@ type ToolTipProps = { control: React.ReactNode; className?: string; disabled?: boolean; - cursor?: "pointer" | "help"; + cursor?: "pointer" | "help" | "not-allowed"; }; -const Control = styled.div<{ $cursor?: "pointer" | "help" }>` +const Control = styled.div<{ $cursor?: "pointer" | "help" | "not-allowed"; $showCursor?: boolean }>` display: inline-block; position: relative; - cursor: ${({ $cursor }) => $cursor ?? "pointer"}; + cursor: ${({ $cursor, $showCursor = true }) => ($showCursor && $cursor) ?? "pointer"}; `; const ToolTipView = styled.div<{ $disabled?: boolean }>` @@ -39,7 +39,7 @@ const ToolTipView = styled.div<{ $disabled?: boolean }>` const ToolTip: React.FC = ({ children, control, className, disabled, cursor }) => { return ( - + {control} {children} diff --git a/airbyte-webapp/src/components/base/Button/LoadingButton.tsx b/airbyte-webapp/src/components/base/Button/LoadingButton.tsx index 2c621077d5c45..1681bba49f1af 100644 --- a/airbyte-webapp/src/components/base/Button/LoadingButton.tsx +++ b/airbyte-webapp/src/components/base/Button/LoadingButton.tsx @@ -21,13 +21,13 @@ const SymbolSpinner = styled(FontAwesomeIcon)` position: absolute; left: 50%; animation: ${SpinAnimation} 1.5s linear 0s infinite; - color: ${({ theme }) => theme.primaryColor}; + color: ${({ theme, danger }) => (danger ? theme.dangerColor : theme.primaryColor)}; margin: -1px 0 -3px -9px; `; const ButtonView = styled(Button)` pointer-events: none; - background: ${({ theme }) => theme.primaryColor25}; + background: ${({ theme, danger }) => (danger ? 
theme.dangerColor25 : theme.primaryColor25)}; border-color: transparent; position: relative; `; @@ -35,14 +35,18 @@ const ButtonView = styled(Button)` const Invisible = styled.div` color: rgba(255, 255, 255, 0); `; - +/* + * TODO: this component need to be refactored - we need to have + * the only one + ); + + const syncNowBtn = ( + startAction({ action: onSync })} + > + {showFeedback ? ( + + ) : ( + <> + + + + )} + + ); + return ( = ({ connection, frequencyText }) => {connection.status === ConnectionStatus.ACTIVE && (

- - startAction({ action: onSync })} - > - {showFeedback ? ( - - ) : ( - <> - - - - )} - + + + + + +
)} @@ -112,15 +143,6 @@ const StatusView: React.FC = ({ connection, frequencyText }) => > {jobs.length ? : } />} - {isModalOpen && ( - setIsModalOpen(false)} - onSubmit={async () => { - await onReset(); - setIsModalOpen(false); - }} - /> - )} ); }; diff --git a/airbyte-webapp/src/theme.ts b/airbyte-webapp/src/theme.ts index 8844bbe9df708..1176c8b555201 100644 --- a/airbyte-webapp/src/theme.ts +++ b/airbyte-webapp/src/theme.ts @@ -12,6 +12,7 @@ export const theme = { brightColor: "#F7F6FF", dangerColor: "#FF5E7B", + dangerColor25: "rgba(255, 94, 123, .25)", warningColor: "#FFBF00", warningBackgroundColor: "rgba(255, 191, 0, 0.2)", lightDangerColor: "#FEEDEE", diff --git a/airbyte-webapp/src/views/Connection/ConnectionForm/ConnectionForm.test.tsx b/airbyte-webapp/src/views/Connection/ConnectionForm/ConnectionForm.test.tsx index 0b5d811b318ca..12a0c23e8bd98 100644 --- a/airbyte-webapp/src/views/Connection/ConnectionForm/ConnectionForm.test.tsx +++ b/airbyte-webapp/src/views/Connection/ConnectionForm/ConnectionForm.test.tsx @@ -3,9 +3,10 @@ import userEvent from "@testing-library/user-event"; import { Connection, ConnectionNamespaceDefinition, ConnectionStatus } from "core/domain/connection"; import { Destination, Source } from "core/domain/connector"; +import { ConfirmationModalService } from "hooks/services/ConfirmationModal"; import { render } from "utils/testutils"; -import ConnectionForm from "./ConnectionForm"; +import ConnectionForm, { ConnectionFormProps } from "./ConnectionForm"; const mockSource: Source = { sourceId: "test-source", @@ -62,13 +63,23 @@ jest.mock("services/workspaces/WorkspacesService", () => { }; }); +const renderConnectionForm = (props: ConnectionFormProps) => + render( + + + + ); + describe("", () => { let container: HTMLElement; describe("edit mode", () => { beforeEach(async () => { - const renderResult = await render( - - ); + const renderResult = await renderConnectionForm({ + onSubmit: jest.fn(), + mode: "edit", + connection: mockConnection, + }); + container = renderResult.container; }); test("it renders relevant items", async () => { @@ -84,9 +95,12 @@ describe("", () => { }); describe("readonly mode", () => { beforeEach(async () => { - const renderResult = await render( - - ); + const renderResult = await renderConnectionForm({ + onSubmit: jest.fn(), + mode: "readonly", + connection: mockConnection, + }); + container = renderResult.container; }); test("it renders only relevant items for the mode", async () => { diff --git a/airbyte-webapp/src/views/Connection/ConnectionForm/ConnectionForm.tsx b/airbyte-webapp/src/views/Connection/ConnectionForm/ConnectionForm.tsx index b66fa55df9e3f..aeadbd3710f2f 100644 --- a/airbyte-webapp/src/views/Connection/ConnectionForm/ConnectionForm.tsx +++ b/airbyte-webapp/src/views/Connection/ConnectionForm/ConnectionForm.tsx @@ -5,10 +5,9 @@ import styled from "styled-components"; import { ControlLabels, DropDown, DropDownRow, H5, Input, Label } from "components"; import { FormChangeTracker } from "components/FormChangeTracker"; -import ResetDataModal from "components/ResetDataModal"; -import { ModalTypes } from "components/ResetDataModal/types"; import { Connection, ConnectionNamespaceDefinition, ScheduleProperties } from "core/domain/connection"; +import { useConfirmationModalService } from "hooks/services/ConfirmationModal"; import { useFormChangeTrackerService, useUniqueFormId } from "hooks/services/FormChangeTracker"; import { useGetDestinationDefinitionSpecification } from 
"services/connector/DestinationDefinitionSpecificationService"; import { useCurrentWorkspace } from "services/workspaces/WorkspacesService"; @@ -123,18 +122,28 @@ const ConnectionForm: React.FC = ({ additionalSchemaControl, connection, }) => { + const { openConfirmationModal, closeConfirmationModal } = useConfirmationModalService(); const destDefinition = useGetDestinationDefinitionSpecification(connection.destination.destinationDefinitionId); const { clearFormChange } = useFormChangeTrackerService(); const formId = useUniqueFormId(); - - const [modalIsOpen, setResetModalIsOpen] = useState(false); const [submitError, setSubmitError] = useState(null); - const formatMessage = useIntl().formatMessage; - const initialValues = useInitialValues(connection, destDefinition, mode !== "create"); - const workspace = useCurrentWorkspace(); + + const openResetDataModal = useCallback(() => { + openConfirmationModal({ + title: "form.resetData", + text: "form.changedColumns", + submitButtonText: "form.reset", + cancelButtonText: "form.noNeed", + onSubmit: async () => { + await onReset?.(); + closeConfirmationModal(); + }, + }); + }, [closeConfirmationModal, onReset, openConfirmationModal]); + const onFormSubmit = useCallback( async (values: FormikConnectionFormValues, formikHelpers: FormikHelpers) => { const formValues: ConnectionFormValues = connectionValidationSchema.cast(values, { @@ -152,8 +161,9 @@ const ConnectionForm: React.FC = ({ const requiresReset = mode === "edit" && !equal(initialValues.syncCatalog, values.syncCatalog) && !editSchemeMode; + if (requiresReset) { - setResetModalIsOpen(true); + openResetDataModal(); } result?.onSubmitComplete?.(); @@ -170,6 +180,7 @@ const ConnectionForm: React.FC = ({ mode, initialValues.syncCatalog, editSchemeMode, + openResetDataModal, ] ); @@ -311,16 +322,6 @@ const ConnectionForm: React.FC = ({ )} - {modalIsOpen && ( - setResetModalIsOpen(false)} - onSubmit={async () => { - await onReset?.(); - setResetModalIsOpen(false); - }} - /> - )} )} From 147100f06b22a225086bbe6825970ae6fc76a700 Mon Sep 17 00:00:00 2001 From: Krishna Glick Date: Tue, 10 May 2022 13:31:40 -0700 Subject: [PATCH 17/55] Invalidate connections cache when enabling/disabling connections on connections list page (#12753) --- .../src/hooks/services/useConnectionHook.tsx | 15 +++++++++++++-- .../components/ConnectionsTable.tsx | 6 +++++- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/airbyte-webapp/src/hooks/services/useConnectionHook.tsx b/airbyte-webapp/src/hooks/services/useConnectionHook.tsx index 846880497e6f4..d69eec7b4e600 100644 --- a/airbyte-webapp/src/hooks/services/useConnectionHook.tsx +++ b/airbyte-webapp/src/hooks/services/useConnectionHook.tsx @@ -1,4 +1,4 @@ -import { useMutation, useQueryClient } from "react-query"; +import { QueryClient, useMutation, useQueryClient } from "react-query"; import { useConfig } from "config"; import FrequencyConfig from "config/FrequencyConfig.json"; @@ -222,4 +222,15 @@ const useConnectionList = (): ListConnection => { return useSuspenseQuery(connectionsKeys.lists(), () => service.list(workspace.workspaceId)); }; -export { useConnectionList, useGetConnection, useUpdateConnection, useCreateConnection, useDeleteConnection }; +const invalidateConnectionsList = async (queryClient: QueryClient) => { + await queryClient.invalidateQueries(connectionsKeys.lists()); +}; + +export { + useConnectionList, + useGetConnection, + useUpdateConnection, + useCreateConnection, + useDeleteConnection, + invalidateConnectionsList, +}; diff --git 
a/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/components/ConnectionsTable.tsx b/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/components/ConnectionsTable.tsx index f69d70fe84857..1a6bdbd20b218 100644 --- a/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/components/ConnectionsTable.tsx +++ b/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/components/ConnectionsTable.tsx @@ -1,4 +1,5 @@ import React, { useCallback } from "react"; +import { useQueryClient } from "react-query"; import { ConnectionTable } from "components/EntityTable"; import useSyncActions from "components/EntityTable/hooks"; @@ -6,6 +7,7 @@ import { ITableDataItem } from "components/EntityTable/types"; import { getConnectionTableData } from "components/EntityTable/utils"; import { Connection } from "core/domain/connection"; +import { invalidateConnectionsList } from "hooks/services/useConnectionHook"; import useRouter from "hooks/useRouter"; import { useDestinationDefinitionList } from "services/connector/DestinationDefinitionService"; import { useSourceDefinitionList } from "services/connector/SourceDefinitionService"; @@ -17,6 +19,7 @@ type IProps = { const ConnectionsTable: React.FC = ({ connections }) => { const { push } = useRouter(); const { changeStatus, syncManualConnection } = useSyncActions(); + const queryClient = useQueryClient(); const { sourceDefinitions } = useSourceDefinitionList(); @@ -30,9 +33,10 @@ const ConnectionsTable: React.FC = ({ connections }) => { if (connection) { await changeStatus(connection); + await invalidateConnectionsList(queryClient); } }, - [changeStatus, connections] + [changeStatus, connections, queryClient] ); const onSync = useCallback( From f8a35eaa805e5e7a6f34f781a61411c648b3baf2 Mon Sep 17 00:00:00 2001 From: Davin Chia Date: Wed, 11 May 2022 13:02:07 +0800 Subject: [PATCH 18/55] Add Java Catalog documentation. (#12751) Clean up and add better guidelines on how to use the Java catalogs we recently added. Took the chance to move existing documentation to improve reading flow. --- .../gradle-dependency-update.md | 32 +++++++++--- .../updating-documentation.md | 52 +++++++++---------- docs/integrations/sources/s3.md | 15 +++--- docusaurus/sidebars.js | 4 ++ 4 files changed, 62 insertions(+), 41 deletions(-) rename docs/{ => contributing-to-airbyte}/gradle-dependency-update.md (80%) diff --git a/docs/gradle-dependency-update.md b/docs/contributing-to-airbyte/gradle-dependency-update.md similarity index 80% rename from docs/gradle-dependency-update.md rename to docs/contributing-to-airbyte/gradle-dependency-update.md index 5e77b62cd58de..6606af7852d1c 100644 --- a/docs/gradle-dependency-update.md +++ b/docs/contributing-to-airbyte/gradle-dependency-update.md @@ -1,7 +1,10 @@ -# Introduction +# Updating Gradle Dependencies +We use [Gradle Catalogs](https://docs.gradle.org/current/userguide/platforms.html#sub:central-declaration-of-dependencies) +to keep dependencies synced up across different Java projects. This is particularly useful for Airbyte Cloud, and can be +used by any project seeking to build off Airbyte. -This document describes how to update dependency versions for Airbyte's **Gradle** build and how to share them with other **projects**. -Dependencies should be represented as dependency coordinates, that a user can pick from when declaring dependencies in a build script. +Catalogs allow dependencies to be represented as dependency coordinates. 
A user can reference preset dependencies/versions +when declaring dependencies in a build script. > Version Catalog Example: > ```gradle @@ -9,7 +12,22 @@ Dependencies should be represented as dependency coordinates, that a user can pi > implementation(libs.groovy.core) > } > ``` -> In this context, libs is a catalog and groovy represents a dependency available in this catalog. +> In this context, libs is a catalog and groovy represents a dependency available in this catalog. Instead of declaring a +> specific version, we reference the version in the Catalog. + +This helps reduce the chances of dependency drift and dependency hell. + +Thus, please use the Catalog when: +- declaring new common dependencies. +- specifying new common dependencies. + +A common dependency is a foundational Java package e.g. Apache commons, Log4j etc that is often the basis on which libraries +are built upon. + +This is a relatively new addition, so devs should keep this in mind and use the top-level Catalog on a best-effort basis. + +### Setup Details +This section is for engineers wanting to understand Gradle Catalog details and how Airbyte has set this up. #### The version catalog TOML file format Gradle offers a conventional file to declare a catalog. @@ -114,7 +132,7 @@ There should be specified section `dependencyResolutionManagement` which uses `d > ``` #### Sharing Catalogs -To share catalog for further usage by other Projects need to do 2 steps: +To share this catalog for further usage by other Projects, we do the following 2 steps: - Define `version-catalog` plugin in `build.gradle` file (ignore if this record exists) ```gradle plugins { @@ -131,14 +149,14 @@ To share catalog for further usage by other Projects need to do 2 steps: ``` #### Configure the Plugin Publishing Plugin -For **Publishing** need to define `maven-publish` plugin in `build.gradle` file (ignore if this record exists): +To **Publishing**, first define the `maven-publish` plugin in `build.gradle` file (ignore if this already exists): ```gradle plugins { id '...' id 'maven-publish' } ``` -Further on need to describe publishing section. Please use [this](https://docs.gradle.org/current/userguide/publishing_gradle_plugins.html) official documentation for more details. +After that, describe the publishing section. Please use [this](https://docs.gradle.org/current/userguide/publishing_gradle_plugins.html) official documentation for more details. > Example: > ```gradle > publishing { diff --git a/docs/contributing-to-airbyte/updating-documentation.md b/docs/contributing-to-airbyte/updating-documentation.md index cb0d59b6d88f6..6440037ca9864 100644 --- a/docs/contributing-to-airbyte/updating-documentation.md +++ b/docs/contributing-to-airbyte/updating-documentation.md @@ -1,12 +1,15 @@ # Updating Documentation -Our documentation uses [GitBook](https://gitbook.com), and all the [Markdown](https://guides.github.com/features/mastering-markdown/) files are stored in our Github repository. +Documentation is written as [Markdown](https://guides.github.com/features/mastering-markdown/) files and stored in our Github repository. ## Workflow for updating docs 1. Modify docs using Git or the Github UI \(All docs live in the `docs/` folder in the [Airbyte repository](https://github.com/airbytehq/airbyte)\) 2. If you're adding new files, update `docs/SUMMARY.md`. -4. Create a Pull Request +3. 
Create a Pull Request
+
+### Sidebar updates
+To edit the sidebar you must [edit this JSON in this Javascript file](https://github.com/airbytehq/airbyte/blob/master/docusaurus/sidebars.js).

### Modify in the Github UI
@@ -35,6 +38,27 @@ Our documentation uses [GitBook](https://gitbook.com), and all the [Markdown](ht
3. Modify the documentation.
4. Create a pull request

+### Testing Changes
+* You can run a copy of the website locally to test how your changes will look in production
+* This is not necessary for smaller changes, but is suggested for large changes and **any** change to the sidebar, as the JSON will blow up if we misplace a comma.
+* You will need [yarn](https://yarnpkg.com) installed locally to build docusaurus
+* Run the following commands
+```bash
+cd docusaurus
+yarn install
+yarn build
+yarn serve
+```
+
+You can now navigate to [http://localhost:3000/](http://localhost:3000/) to see your changes. You can stop the running server in OSX/Linux by pressing `control-c` in the terminal running the server
+
+### Deploying the docs website
+We use Github Pages for hosting this docs website, and Docusaurus as the docs framework. An [internal guide for deployment lives here](https://github.com/airbytehq/runbooks/blob/master/deploying_and_reverting_docs.md).
+
+The source code for the docs lives in the [airbyte monorepo's `docs/` directory](https://github.com/airbytehq/airbyte/tree/master/docs). To publish the updated docs on this website after you've committed a change to the `docs/` markdown files, it is required to locally run a manual publish flow. Locally run `./tools/bin/deploy_docusaurus` from the `airbyte` monorepo project root to deploy this docs website.
+
+Automating this process via CI is currently not easy because we push to a [dedicated repo hosting the Github pages](https://github.com/airbytehq/airbytehq.github.io) from the `airbyte` monorepo, which is hard to do in CI. This is not intended to be the end state (we will need to publish these docs via CI eventually), but as of May 2022 we have decided the juice isn't worth the squeeze just yet.
+
## Documentation Best Practices

Connectors typically have the following documentation elements:
@@ -57,9 +81,6 @@ Every module should have a README containing:
* how to build and run the code locally & via Docker
* any other information needed for local iteration

-### Sidebar updates
-# To edit the sidebar you must [edit this JSON in this Javascript file](https://github.com/airbytehq/airbyte/blob/master/docusaurus/sidebars.js)
-
### Changelogs

**Core**
@@ -135,24 +156,3 @@ _good context_:
```text
When creating or updating connectors, we spend a lot of time manually transcribing JSON Schema files based on OpenAPI docs. This is necessary because OpenAPI and JSON schema are very similar but not perfectly compatible. This process is automatable. Therefore we should create a program which converts from OpenAPI to JSONSchema format.
```
-
-## Testing Changes
-* You can run a copy of the website locally to test how your changes will look in production
-* This is not necessary for smaller changes, but is suggested for large changes and **any** change to the sidebar, as the JSON will blow up if we misplace a comma.
-* You will need [yarn](https://yarnpkg.com) installed locally to build docusaurus
-* Run the following commands
-```bash
-cd docusaurus
-yarn install
-yarn build
-yarn serve
-```
-
-You can now navigate to [http://localhost:3000/](http://localhost:3000/) to see your changes.
You can stop the running server in OSX/Linux by pressing `control-c` in the terminal running the server
-
-## Deploying the docs website
-We use Github Pages for hosting this docs website, and Docusaurus as the docs framework. An [internal guide for deployment lives here](https://github.com/airbytehq/runbooks/blob/master/deploying_and_reverting_docs.md).
-
-The source code for the docs lives in the [airbyte monorepo's `docs/` directory](https://github.com/airbytehq/airbyte/tree/master/docs). To publish the updated docs on this website after you've committed a change to the `docs/` markdown files, it is required to locally run a manual publish flow. Locally run `./tools/bin/deploy_docusaurus` from the `airbyte` monorepo project root to deploy this docs website.
-
-Automating this process via CI is currently not easy because we push to a [dedicated repo hosting the Github pages](https://github.com/airbytehq/airbytehq.github.io) from the `airbyte` monorepo, which is hard to do in CI. This is not intended to be the end state (we will need to publish these docs via CI eventually), but as of May 2022 have decided the juice isn't worth the squeeze just yet.
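The sections moved above keep the local-preview and manual-publish flows in separate passages; condensed into a single hedged sketch (every command is taken from the surrounding text, and no extra flags are assumed):

```bash
# Preview the docs site locally, then publish manually once the change is committed
cd docusaurus
yarn install && yarn build && yarn serve    # serves http://localhost:3000/; stop with control-c
cd .. && ./tools/bin/deploy_docusaurus      # manual publish flow to the Github Pages repo
```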
diff --git a/docs/integrations/sources/s3.md b/docs/integrations/sources/s3.md
index 526f5a634681b..f4e664d4bcba8 100644
--- a/docs/integrations/sources/s3.md
+++ b/docs/integrations/sources/s3.md
@@ -4,9 +4,8 @@ This page contains the setup guide and reference information for the Amazon S3 s
## Prerequisites

-
-
-
+- Connector-specific prerequisites which are required in both Airbyte Cloud & OSS.
+- If OSS has different requirements (e.g., the user needs to set up a developer application).

## Setup guide
@@ -20,12 +19,12 @@ This page contains the setup guide and reference information for the Amazon S3 s
1. [Log into your Airbyte Cloud](https://cloud.airbyte.io/workspaces) account.
2. In the left navigation bar, click ****. In the top-right corner, click **+new source/destination**.
-3. On the Set up the page, enter the name for the connector and select **** from the type dropdown.
+3. On the Set up the source/destination page, enter the name for the `connector name` connector and select **connector name** from the `Source/Destination` type dropdown.
4. Set `dataset` appropriately. This will be the name of the table in the destination.
-3. If your bucket contains _only_ files containing data for this table, use `**` as path\_pattern. See the [Path Patterns section](s3.md#path-patterns) for more specific pattern matching.
-4. Leave schema as `{}` to automatically infer it from the file\(s\). For details on providing a schema, see the [User Schema section](s3.md#user-schema).
-5. Fill in the fields within the provider box appropriately. If your bucket is not public, add [credentials](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) with sufficient permissions under `aws_access_key_id` and `aws_secret_access_key`.
-6. Choose the format corresponding to the format of your files and fill in fields as required. If unsure about values, try out the defaults and come back if needed. Find details on these settings [here](s3.md#file-format-settings).
+5. If your bucket contains _only_ files containing data for this table, use `**` as path\_pattern. See the [Path Patterns section](s3.md#path-patterns) for more specific pattern matching.
+6. Leave schema as `{}` to automatically infer it from the file\(s\). For details on providing a schema, see the [User Schema section](s3.md#user-schema).
+7. Fill in the fields within the provider box appropriately. If your bucket is not public, add [credentials](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) with sufficient permissions under `aws_access_key_id` and `aws_secret_access_key`.
+8. Choose the format corresponding to the format of your files and fill in fields as required. If unsure about values, try out the defaults and come back if needed. Find details on these settings [here](s3.md#file-format-settings).

### For Airbyte OSS:

diff --git a/docusaurus/sidebars.js b/docusaurus/sidebars.js
index ae16c1cced3a7..52945dbfdfb72 100644
--- a/docusaurus/sidebars.js
+++ b/docusaurus/sidebars.js
@@ -1073,6 +1073,10 @@ module.exports = {
       type: 'doc',
       id: "contributing-to-airbyte/gradle-cheatsheet",
     },
+    {
+      type: 'doc',
+      id: "contributing-to-airbyte/gradle-dependency-update",
+    },
     {
       type: 'doc',
       id: "contributing-to-airbyte/updating-documentation",

From a0b7e4a09b6bad1e6ef81648c2991f558a411059 Mon Sep 17 00:00:00 2001
From: swyx
Date: Wed, 11 May 2022 20:08:21 +0800
Subject: [PATCH 19/55] Docs: add reminder to clone repo in CDK speedrun guide
 (#12761)

---
 docs/connector-development/tutorials/cdk-speedrun.md | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/docs/connector-development/tutorials/cdk-speedrun.md b/docs/connector-development/tutorials/cdk-speedrun.md
index 56977c1889ac3..76e2e1cdda885 100644
--- a/docs/connector-development/tutorials/cdk-speedrun.md
+++ b/docs/connector-development/tutorials/cdk-speedrun.md
@@ -17,8 +17,11 @@ If you are a visual learner and want to see a video version of this guide going
 #### Generate the Template
 
 ```bash
-$ cd airbyte-integrations/connector-templates/generator # start from repo root
-$ ./generate.sh
+# # clone the repo if you haven't already
+# git clone --depth 1 https://github.com/airbytehq/airbyte/
+# cd airbyte # start from repo root
+cd airbyte-integrations/connector-templates/generator
+./generate.sh
 ```
 
 Select the `Python HTTP API Source` and name it `python-http-example`.

From 23b655e73125d4e481b1d0cf9b645dd9cd1cc0ec Mon Sep 17 00:00:00 2001
From: "Chun-Sheng, Li"
Date: Wed, 11 May 2022 20:09:50 +0800
Subject: [PATCH 20/55] Docs: Fix short tag link (#12737)

---
 docs/project-overview/licenses/license-faq.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/project-overview/licenses/license-faq.md b/docs/project-overview/licenses/license-faq.md
index 12b070d164e04..837ae5a5fd3d7 100644
--- a/docs/project-overview/licenses/license-faq.md
+++ b/docs/project-overview/licenses/license-faq.md
@@ -11,7 +11,7 @@
 ## About Elastic License 2.0 (ELv2)
 ELv2 is a simple, non-copyleft license, allowing for the right to “use, copy, distribute, make available, and prepare derivative works of the software”. Anyone can use Airbyte, free of charge. You can run the software at scale on your infrastructure. There are only three high-level limitations. You cannot:
-1. Provide the products to others as a managed service ([read more](license-faq#what-is-the-managed-service-use-case-that-is-not-allowed-under-elv2));
+1. Provide the products to others as a managed service ([read more](#what-is-the-managed-service-use-case-that-is-not-allowed-under-elv2));
 2. Circumvent the license key functionality or remove/obscure features protected by license keys; or
 3. Remove or obscure any licensing, copyright, or other notices.
 
@@ -26,7 +26,7 @@
 If you are an Airbyte Cloud customer, nothing changes for you.
For open-source users, everyone can continue to use Airbyte as they are doing today: no limitations on volume, number of users, number of connections… There are only a few high-level limitations. You cannot: -1. Provide the products to others as a managed service. For example, you cannot sell a cloud service that provides users with direct access to Airbyte. You can sell access to applications built and run using Airbyte ([read more](license-faq#what-is-the-managed-service-use-case-that-is-not-allowed-under-elv2)). +1. Provide the products to others as a managed service. For example, you cannot sell a cloud service that provides users with direct access to Airbyte. You can sell access to applications built and run using Airbyte ([read more](#what-is-the-managed-service-use-case-that-is-not-allowed-under-elv2)). 2. Circumvent the license key functionality or remove/obscure features protected by license keys. For example, our code may contain watermarks or keys to unlock proprietary functionality. Those elements of our code will be marked in our source code. You can’t remove or change them. ### Why did Airbyte adopt ELv2? From 2617f833bb9063624da83e8e6a31227556a8167d Mon Sep 17 00:00:00 2001 From: Amruta Ranade <11484018+Amruta-Ranade@users.noreply.github.com> Date: Wed, 11 May 2022 17:45:04 +0530 Subject: [PATCH 21/55] Updated connector statuses (#12765) --- docs/integrations/README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/integrations/README.md b/docs/integrations/README.md index febc683189807..a92a127100841 100644 --- a/docs/integrations/README.md +++ b/docs/integrations/README.md @@ -21,7 +21,7 @@ For more information about the grading system, see [Product Release Stages](http | [Amazon Ads](sources/amazon-ads.md) | Alpha | No | | [Amazon Seller Partner](sources/amazon-seller-partner.md) | Alpha | No | | [Amazon SQS](sources/amazon-sqs.md) | Alpha | Yes | -| [Amplitude](sources/amplitude.md) | Alpha | Yes | +| [Amplitude](sources/amplitude.md) | Beta | Yes | | [Apify Dataset](sources/apify-dataset.md) | Alpha | Yes | | [Appstore](sources/appstore.md) | Alpha | No | | [Asana](sources/asana.md) | Alpha | No | @@ -85,7 +85,7 @@ For more information about the grading system, see [Product Release Stages](http | [Linnworks](sources/linnworks.md) | Alpha | Yes | | [Looker](sources/looker.md) | Alpha | Yes | | [Magento](sources/magento.md) | Alpha | No | -| [Mailchimp](sources/mailchimp.md) | Alpha | Yes | +| [Mailchimp](sources/mailchimp.md) | Beta | Yes | | [Marketo](sources/marketo.md) | Alpha | Yes | | [Microsoft Dynamics AX](sources/microsoft-dynamics-ax.md) | Alpha | No | | [Microsoft Dynamics Customer Engagement](sources/microsoft-dynamics-customer-engagement.md) | Alpha | No | @@ -135,7 +135,7 @@ For more information about the grading system, see [Product Release Stages](http | [Shopify](sources/shopify.md) | Alpha | Yes | | [Short.io](sources/shortio.md) | Alpha | Yes | | [Slack](sources/slack.md) | Alpha | No | -| [Smartsheets](sources/smartsheets.md) | Alpha | No | +| [Smartsheets](sources/smartsheets.md) | Beta | Yes | | [Snapchat Marketing](sources/snapchat-marketing.md) | Alpha | Yes | | [Snowflake](sources/snowflake.md) | Alpha | Yes | | [Spree Commerce](sources/spree-commerce.md) | Alpha | No | @@ -146,7 +146,7 @@ For more information about the grading system, see [Product Release Stages](http | [SurveyMonkey](sources/surveymonkey.md) | Alpha | No | | [Tempo](sources/tempo.md) | Alpha | Yes | | [TiDB](sources/tidb.md) | Alpha | No | -| 
[TikTok Marketing](./sources/tiktok-marketing.md) | Alpha | No | +| [TikTok Marketing](./sources/tiktok-marketing.md) | Beta | Yes | | [Trello](sources/trello.md) | Alpha | No | | [Twilio](sources/twilio.md) | Alpha | Yes | | [Typeform](sources/typeform.md) | Alpha | Yes | From f9188590cc61b37e159ec405dc1209f80939eac3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Melker=20=C3=96hrman?= <101633418+mohrman@users.noreply.github.com> Date: Wed, 11 May 2022 17:59:58 +0200 Subject: [PATCH 22/55] Add avro parser to s3 source (#12602) * added MVP avro parser running fine locally * added unit tests for avro * added wip state of avro integration test setup * deleted unused files * added avro specific config path * fixed comments. Added nested record support, simplify code and minor fixes * bumped version + docs update * Added working acceptance tests + format * auto-bump connector version Co-authored-by: George Claireaux Co-authored-by: Octavia Squidington III --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 11 +- .../connectors/source-s3/Dockerfile | 2 +- .../source-s3/acceptance-test-config.yml | 22 ++++ .../expected_records_avro.txt | 10 ++ .../source-s3/integration_tests/spec.json | 14 +- .../connectors/source-s3/setup.py | 3 + .../formats/avro_parser.py | 88 +++++++++++++ .../formats/avro_spec.py | 17 +++ .../source_s3/source_files_abstract/spec.py | 3 +- .../source_s3/source_files_abstract/stream.py | 2 + .../source-s3/unit_tests/test_avro_parser.py | 120 ++++++++++++++++++ docs/integrations/sources/s3.md | 4 + 13 files changed, 293 insertions(+), 5 deletions(-) create mode 100644 airbyte-integrations/connectors/source-s3/integration_tests/expected_records_avro.txt create mode 100644 airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/formats/avro_parser.py create mode 100644 airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/formats/avro_spec.py create mode 100644 airbyte-integrations/connectors/source-s3/unit_tests/test_avro_parser.py diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 4ca4011de19d5..7e2823e6863a7 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -779,7 +779,7 @@ - name: S3 sourceDefinitionId: 69589781-7828-43c5-9f63-8925b1c1ccc2 dockerRepository: airbyte/source-s3 - dockerImageTag: 0.1.11 + dockerImageTag: 0.1.12 documentationUrl: https://docs.airbyte.io/integrations/sources/s3 icon: s3.svg sourceType: file diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 5c0fd5ebcea68..c1adfc02f6e3d 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -7144,7 +7144,7 @@ path_in_connector_config: - "credentials" - "client_secret" -- dockerImage: "airbyte/source-s3:0.1.11" +- dockerImage: "airbyte/source-s3:0.1.12" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/s3" changelogUrl: "https://docs.airbyte.io/integrations/sources/s3" @@ -7314,6 +7314,15 @@ \ your data is particularly wide." default: 2 type: "integer" + - title: "Avro" + description: "This connector utilises fastavro for Avro parsing." 
+ type: "object" + properties: + filetype: + title: "Filetype" + const: "avro" + type: "string" schema: title: "Manually enforced data schema (Optional)" description: "Optionally provide a schema to enforce, as a valid JSON string.\ diff --git a/airbyte-integrations/connectors/source-s3/Dockerfile b/airbyte-integrations/connectors/source-s3/Dockerfile index 6c2cf7ccbe486..aed9ea734e425 100644 --- a/airbyte-integrations/connectors/source-s3/Dockerfile +++ b/airbyte-integrations/connectors/source-s3/Dockerfile @@ -17,5 +17,5 @@ COPY source_s3 ./source_s3 ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.11 +LABEL io.airbyte.version=0.1.12 LABEL io.airbyte.name=airbyte/source-s3 diff --git a/airbyte-integrations/connectors/source-s3/acceptance-test-config.yml b/airbyte-integrations/connectors/source-s3/acceptance-test-config.yml index 40770e58c00a2..bf55f54e0d9dc 100644 --- a/airbyte-integrations/connectors/source-s3/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-s3/acceptance-test-config.yml @@ -11,6 +11,9 @@ tests: # for Parquet format - config_path: "secrets/parquet_config.json" status: "succeed" + # # for Avro format + - config_path: "secrets/avro_config.json" + status: "succeed" # for custom server - config_path: "integration_tests/config_minio.json" status: "succeed" @@ -21,6 +24,8 @@ tests: - config_path: "secrets/config.json" # for Parquet format - config_path: "secrets/parquet_config.json" + # # for Avro format + - config_path: "secrets/avro_config.json" # for custom server - config_path: "integration_tests/config_minio.json" basic_read: @@ -36,6 +41,12 @@ tests: configured_catalog_path: "integration_tests/parquet_configured_catalog.json" expect_records: path: "integration_tests/parquet_expected_records.txt" + # for Avro format + - config_path: "secrets/avro_config.json" + timeout_seconds: 1800 + configured_catalog_path: "integration_tests/configured_catalog.json" + expect_records: + path: "integration_tests/expected_records_avro.txt" # for custom server - config_path: "integration_tests/config_minio.json" timeout_seconds: 1800 @@ -61,6 +72,13 @@ tests: cursor_paths: test: ["_ab_source_file_last_modified"] future_state_path: "integration_tests/abnormal_state.json" + # for Avro format + - config_path: "secrets/avro_config.json" + timeout_seconds: 1800 + configured_catalog_path: "integration_tests/configured_catalog.json" + cursor_paths: + test: ["_ab_source_file_last_modified"] + future_state_path: "integration_tests/abnormal_state.json" # for custom server - config_path: "integration_tests/config_minio.json" timeout_seconds: 1800 @@ -78,6 +96,10 @@ tests: - config_path: "secrets/parquet_config.json" timeout_seconds: 1800 configured_catalog_path: "integration_tests/parquet_configured_catalog.json" + # for Avro format + - config_path: "secrets/avro_config.json" + timeout_seconds: 1800 + configured_catalog_path: "integration_tests/configured_catalog.json" # for custom server - config_path: "integration_tests/config_minio.json" timeout_seconds: 1800 diff --git a/airbyte-integrations/connectors/source-s3/integration_tests/expected_records_avro.txt b/airbyte-integrations/connectors/source-s3/integration_tests/expected_records_avro.txt new file mode 100644 index 0000000000000..d4836fa10530d --- /dev/null +++ b/airbyte-integrations/connectors/source-s3/integration_tests/expected_records_avro.txt @@ -0,0 +1,10 @@ +{"stream": "test", "data": {"id": 0, 
"fullname_and_valid": {"fullname": "cfjwIzCRTL", "valid": false}, "_ab_additional_properties": {}, "_ab_source_file_last_modified": "2022-05-11T11:54:11+0000", "_ab_source_file_url": "test_sample.avro"}, "emitted_at": 10000000} +{"stream": "test", "data": {"id": 1, "fullname_and_valid": {"fullname": "LYOnPyuTWw", "valid": true}, "_ab_additional_properties": {}, "_ab_source_file_last_modified": "2022-05-11T11:54:11+0000", "_ab_source_file_url": "test_sample.avro"}, "emitted_at": 10000000} +{"stream": "test", "data": {"id": 2, "fullname_and_valid": {"fullname": "hyTFbsxlRB", "valid": false}, "_ab_additional_properties": {}, "_ab_source_file_last_modified": "2022-05-11T11:54:11+0000", "_ab_source_file_url": "test_sample.avro"}, "emitted_at": 10000000} +{"stream": "test", "data": {"id": 3, "fullname_and_valid": {"fullname": "ooEUiFcFqp", "valid": true}, "_ab_additional_properties": {}, "_ab_source_file_last_modified": "2022-05-11T11:54:11+0000", "_ab_source_file_url": "test_sample.avro"}, "emitted_at": 10000000} +{"stream": "test", "data": {"id": 4, "fullname_and_valid": {"fullname": "pveENwAvOg", "valid": true}, "_ab_additional_properties": {}, "_ab_source_file_last_modified": "2022-05-11T11:54:11+0000", "_ab_source_file_url": "test_sample.avro"}, "emitted_at": 10000000} +{"stream": "test", "data": {"id": 5, "fullname_and_valid": {"fullname": "pPhWgQgZFq", "valid": true}, "_ab_additional_properties": {}, "_ab_source_file_last_modified": "2022-05-11T11:54:11+0000", "_ab_source_file_url": "test_sample.avro"}, "emitted_at": 10000000} +{"stream": "test", "data": {"id": 6, "fullname_and_valid": {"fullname": "MRNMXFkXZo", "valid": true}, "_ab_additional_properties": {}, "_ab_source_file_last_modified": "2022-05-11T11:54:11+0000", "_ab_source_file_url": "test_sample.avro"}, "emitted_at": 10000000} +{"stream": "test", "data": {"id": 7, "fullname_and_valid": {"fullname": "MXvEWMgnIr", "valid": true}, "_ab_additional_properties": {}, "_ab_source_file_last_modified": "2022-05-11T11:54:11+0000", "_ab_source_file_url": "test_sample.avro"}, "emitted_at": 10000000} +{"stream": "test", "data": {"id": 8, "fullname_and_valid": {"fullname": "rqmFGqZqdF", "valid": true}, "_ab_additional_properties": {}, "_ab_source_file_last_modified": "2022-05-11T11:54:11+0000", "_ab_source_file_url": "test_sample.avro"}, "emitted_at": 10000000} +{"stream": "test", "data": {"id": 9, "fullname_and_valid": {"fullname": "lmPpQTcPFM", "valid": true}, "_ab_additional_properties": {}, "_ab_source_file_last_modified": "2022-05-11T11:54:11+0000", "_ab_source_file_url": "test_sample.avro"}, "emitted_at": 10000000} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-s3/integration_tests/spec.json b/airbyte-integrations/connectors/source-s3/integration_tests/spec.json index 80ec9ec7f9752..ea64990e246bb 100644 --- a/airbyte-integrations/connectors/source-s3/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-s3/integration_tests/spec.json @@ -148,6 +148,18 @@ "type": "integer" } } + }, + { + "title": "Avro", + "description": "This connector utilises fastavro for Avro parsing.", + "type": "object", + "properties": { + "filetype": { + "title": "Filetype", + "const": "avro", + "type": "string" + } + } } ] }, @@ -221,4 +233,4 @@ }, "supportsIncremental": true, "supported_destination_sync_modes": ["overwrite", "append", "append_dedup"] -} \ No newline at end of file +} diff --git a/airbyte-integrations/connectors/source-s3/setup.py b/airbyte-integrations/connectors/source-s3/setup.py index 
678b026d37481..f8c27498e95e5 100644 --- a/airbyte-integrations/connectors/source-s3/setup.py +++ b/airbyte-integrations/connectors/source-s3/setup.py @@ -12,6 +12,8 @@ "wcmatch==8.2", "dill==0.3.4", "pytz", + "fastavro==1.4.11", + "python-snappy==0.6.1", ] TEST_REQUIREMENTS = [ @@ -22,6 +24,7 @@ "pytest-order", "netifaces~=0.11.0", "docker", + "avro==1.11.0", ] setup( diff --git a/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/formats/avro_parser.py b/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/formats/avro_parser.py new file mode 100644 index 0000000000000..ad569ae15402e --- /dev/null +++ b/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/formats/avro_parser.py @@ -0,0 +1,88 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + +from typing import Any, BinaryIO, Iterator, Mapping, TextIO, Union + +import fastavro +from fastavro import reader + +from .abstract_file_parser import AbstractFileParser + +# mapping from apache avro docs: https://avro.apache.org/docs/current/spec.html#schema_complex +data_type_mapping = { + "null": "null", + "boolean": "boolean", + "int": "integer", + "long": "integer", + "float": "number", + "double": "number", + "bytes": "string", + "string": "string", + "record": "object", + "enum": "string", + "array": "array", + "map": "object", + "fixed": "string", +} + + +class AvroParser(AbstractFileParser): + def __init__(self, *args: Any, **kwargs: Any): + super().__init__(*args, **kwargs) + + @property + def is_binary(self) -> bool: + return True + + def _parse_data_type(self, data_type_mapping: dict, avro_schema: dict) -> dict: + """Convert data types from avro to json format + :param data_type_mapping: mapping from avro to json data types + :param avro_schema: schema comes with the avro file + :return schema_dict with data types converted from avro to json standards + """ + schema_dict = {} + for i in avro_schema["fields"]: + data_type = i["type"] + # If field is nullable there will be a list of types and we need to make sure to map the whole list according to data_type_mapping + if isinstance(data_type, list): + schema_dict[i["name"]] = [data_type_mapping[dtype] for dtype in data_type] + # TODO: Figure out a better way to handle nested records. Currently a nested record is returned as a string + elif isinstance(data_type, dict): + schema_dict[i["name"]] = "string" + elif data_type in data_type_mapping: + schema_dict[i["name"]] = data_type_mapping[data_type] + else: + raise TypeError(f"unsupported data type: {data_type} found in avro file") + return schema_dict + + def _get_avro_schema(self, file: Union[TextIO, BinaryIO]) -> dict: + """Extract schema for records + :param file: file-like object (opened via StorageFile) + :return schema extracted from the avro file + """ + avro_reader = fastavro.reader(file) + schema = avro_reader.writer_schema + if not schema["type"] == "record": + unsupported_type = schema["type"] + raise (f"Only record based avro files are supported. 
Found {unsupported_type}") + else: + return schema + + def get_inferred_schema(self, file: Union[TextIO, BinaryIO]) -> dict: + """Return schema + :param file: file-like object (opened via StorageFile) + :return: mapping of {columns:datatypes} where datatypes are JsonSchema types + """ + avro_schema = self._get_avro_schema(file) + schema_dict = self._parse_data_type(data_type_mapping, avro_schema) + return schema_dict + + def stream_records(self, file: Union[TextIO, BinaryIO]) -> Iterator[Mapping[str, Any]]: + """Stream the data using a generator + :param file: file-like object (opened via StorageFile) + :yield: data record as a mapping of {columns:values} + """ + avro_reader = reader(file) + for record in avro_reader: + yield record diff --git a/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/formats/avro_spec.py b/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/formats/avro_spec.py new file mode 100644 index 0000000000000..7af2e26fef536 --- /dev/null +++ b/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/formats/avro_spec.py @@ -0,0 +1,17 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + +from pydantic import BaseModel, Field + + +class AvroFormat(BaseModel): + 'This connector utilises fastavro for Avro parsing.' + + class Config: + title = "Avro" + + filetype: str = Field( + "avro", + const=True, + ) diff --git a/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/spec.py b/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/spec.py index 5886f484f0a1a..f0875deddd748 100644 --- a/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/spec.py +++ b/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/spec.py @@ -10,6 +10,7 @@ from jsonschema import RefResolver from pydantic import BaseModel, Field +from .formats.avro_spec import AvroFormat from .formats.csv_spec import CsvFormat from .formats.parquet_spec import ParquetFormat @@ -59,7 +60,7 @@ class SourceFilesAbstractSpec(BaseModel): order=10, ) - format: Union[CsvFormat, ParquetFormat] = Field( + format: Union[CsvFormat, ParquetFormat, AvroFormat] = Field( default="csv", title="File Format", description="The format of the files you'd like to replicate", order=20 ) diff --git a/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/stream.py b/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/stream.py index bf7c9ce1c8caa..9c1dce3bba7c6 100644 --- a/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/stream.py +++ b/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/stream.py @@ -18,6 +18,7 @@ from .file_info import FileInfo from .formats.abstract_file_parser import AbstractFileParser +from .formats.avro_parser import AvroParser from .formats.csv_parser import CsvParser from .formats.parquet_parser import ParquetParser from .storagefile import StorageFile @@ -38,6 +39,7 @@ def fileformatparser_map(self) -> Mapping[str, type]: return { "csv": CsvParser, "parquet": ParquetParser, + "avro": AvroParser, } # TODO: make these user configurable in spec.json diff --git a/airbyte-integrations/connectors/source-s3/unit_tests/test_avro_parser.py b/airbyte-integrations/connectors/source-s3/unit_tests/test_avro_parser.py new file mode 100644 index 0000000000000..313fac2753db4 --- /dev/null +++ b/airbyte-integrations/connectors/source-s3/unit_tests/test_avro_parser.py @@ -0,0 +1,120 @@ +# 
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + +import os +import random +import string +from random import randrange +from typing import Any, Mapping + +from avro import datafile, io, schema +from source_s3.source_files_abstract.formats.avro_parser import AvroParser + +from .abstract_test_parser import AbstractTestParser +from .conftest import TMP_FOLDER + +simple_schema_str = """{ + "type": "record", + "name": "sampleAvro", + "namespace": "AVRO", + "fields": [ + {"name": "name", "type": "string"}, + {"name": "age", "type": ["int", "null"]}, + {"name": "address", "type": ["float", "null"]}, + {"name": "street", "type": "float"}, + {"name": "valid", "type": "boolean"} + ] +}""" + +nested_records_schema_str = """{ + "type": "record", + "name": "sampleAvroNested", + "namespace": "AVRO", + "fields": [ + {"name": "lastname", "type": "string"}, + {"name": "address","type": { + "type" : "record", + "name" : "AddressUSRecord", + "fields" : [ + {"name": "streetaddress", "type": "string"}, + {"name": "city", "type": "string"} + ] + } + } + ] +}""" + +nested_schema_output = {"lastname": "string", "address": "string"} + +master_schema = { + "name": "string", + "age": ["integer", "null"], + "address": ["number", "null"], + "street": "number", + "valid": "boolean", +} + + +class TestAvroParser(AbstractTestParser): + filetype = "avro" + + @classmethod + def generate_avro_file(cls, schema_str: str, out_file, num_rows: int) -> str: + """Creates an avro file and saves to tmp folder to be used by test cases + :param schema_str: valid avro schema as a string + :param out_file: name of file to be created + :param num_rows: number of rows to be generated + :return: string with path to the file created + """ + filename = os.path.join(TMP_FOLDER, out_file + "." + cls.filetype) + parsed_schema = schema.parse(schema_str) + rec_writer = io.DatumWriter(parsed_schema) + file_writer = datafile.DataFileWriter(open(filename, "wb"), rec_writer, parsed_schema) + for _ in range(num_rows): + data = {} + data["name"] = "".join(random.choice(string.ascii_letters) for i in range(10)) + data["age"] = randrange(-100, 100) + data["address"] = random.uniform(1.1, 100.10) + data["street"] = random.uniform(1.1, 100.10) + data["valid"] = random.choice([True, False]) + file_writer.append(data) + file_writer.close() + return filename + + @classmethod + def cases(cls) -> Mapping[str, Any]: + """ + return test cases + """ + cases = {} + # test basic file with data type conversions + cases["simple_test"] = { + "AbstractFileParser": AvroParser(format=cls.filetype), + "filepath": cls.generate_avro_file(simple_schema_str, "test_file", 1000), + "num_records": 1000, + "inferred_schema": master_schema, + "line_checks": {}, + "fails": [], + } + # test file with 0 records. Will pass but not ingest anything + cases["test_zero_rows"] = { + "AbstractFileParser": AvroParser(format=cls.filetype), + "filepath": cls.generate_avro_file(simple_schema_str, "test_file_zero_rows", 0), + "num_records": 0, + "inferred_schema": master_schema, + "line_checks": {}, + "fails": [], + } + + # test for avro schema with nested records. 
This will pass as all nested records are returned as one string
+        cases["test_nested_records"] = {
+            "AbstractFileParser": AvroParser(format=cls.filetype),
+            "filepath": cls.generate_avro_file(nested_records_schema_str, "test_nested_records", 0),
+            "num_records": 0,
+            "inferred_schema": nested_schema_output,
+            "line_checks": {},
+            "fails": [],
+        }
+
+        return cases
diff --git a/docs/integrations/sources/s3.md b/docs/integrations/sources/s3.md
index f4e664d4bcba8..8e766f2d8c632 100644
--- a/docs/integrations/sources/s3.md
+++ b/docs/integrations/sources/s3.md
@@ -188,11 +188,15 @@ Apache Parquet file is a column-oriented data storage format of the Apache Hadoo
 You can find details on [here](https://arrow.apache.org/docs/python/generated/pyarrow.parquet.ParquetFile.html#pyarrow.parquet.ParquetFile.iter_batches).
 
+### Avro
+
+The Avro parser uses [fastavro](https://fastavro.readthedocs.io/en/latest/). Currently, no additional options are supported.
 
 ## Changelog
 
 | Version | Date | Pull Request | Subject |
 |:--------| :--- | :--- | :--- |
+| 0.1.12 | 2022-05-11 | [12602](https://github.com/airbytehq/airbyte/pull/12602) | Added support for Avro file format |
 | 0.1.11 | 2022-04-30 | [12500](https://github.com/airbytehq/airbyte/pull/12500) | Improve input configuration copy |
 | 0.1.10 | 2022-01-28 | [8252](https://github.com/airbytehq/airbyte/pull/8252) | Refactoring of files' metadata |
 | 0.1.9 | 2022-01-06 | [9163](https://github.com/airbytehq/airbyte/pull/9163) | Work-around for web-UI, `backslash - t` converts to `tab` for `format.delimiter` field. |

From 8d7e99fa156a5023837d38a12b7d73e9bcf80325 Mon Sep 17 00:00:00 2001
From: Anne <102554163+alovew@users.noreply.github.com>
Date: Wed, 11 May 2022 09:59:01 -0700
Subject: [PATCH 23/55] apply jacoco (#12755)

---
 build.gradle | 36 ++++++++++++++++++++++++++++++++++++
 1 file changed, 36 insertions(+)

diff --git a/build.gradle b/build.gradle
index ff8918dcf8e9f..6f0c92de574fd 100644
--- a/build.gradle
+++ b/build.gradle
@@ -215,6 +215,37 @@ subprojects {
     ruleSetFiles = files(rootProject.file('tools/gradle/pmd/rules.xml'))
   }
 
+  jacoco {
+    toolVersion = "0.8.7"
+  }
+
+  jacocoTestReport {
+    dependsOn test
+    reports {
+      html.required = true
+      xml.required = true
+      csv.required = false
+    }
+  }
+
+  jacocoTestCoverageVerification {
+    violationRules {
+      failOnViolation = false
+      rule {
+        element = 'CLASS'
+        excludes = ['**/*Test*', '**/generated*']
+        limit {
+          counter = 'BRANCH'
+          minimum = 0.8
+        }
+        limit {
+          counter = 'INSTRUCTION'
+          minimum = 0.8
+        }
+      }
+    }
+  }
+
   def integrationTagName = 'platform-integration'
   def slowIntegrationTagName = 'platform-slow-integration'
   // make tag accessible in submodules.
@@ -223,6 +254,10 @@ subprojects {
   }
 
   test {
+    jacoco {
+      enabled = true
+      excludes = ['**/*Test*', '**/generated*']
+    }
     useJUnitPlatform {
       excludeTags(integrationTagName, slowIntegrationTagName, cloudStorageTestTagName)
     }
@@ -331,6 +366,7 @@ subprojects {
   }
 
   javadoc.options.addStringOption('Xdoclint:none', '-quiet')
+  check.dependsOn 'jacocoTestCoverageVerification'
 }
 
 task('generate') {
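Patch 23 wires `check` to depend on `jacocoTestCoverageVerification`, so the JaCoCo tasks above run on every standard build. As a hedged sketch of exercising them directly; the module path is an illustrative assumption (any Java submodule inherits the `subprojects {}` block), and both task names come from Gradle's standard JaCoCo plugin:

```bash
# jacocoTestReport depends on test (per the hunk above), so this runs tests and writes HTML/XML reports
./gradlew :airbyte-workers:jacocoTestReport

# Applies the 80% branch/instruction thresholds; failOnViolation = false means violations are logged, not fatal
./gradlew :airbyte-workers:jacocoTestCoverageVerification
```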
@@ -223,6 +254,10 @@ subprojects {
     }
 
     test {
+        jacoco {
+            enabled = true
+            excludes = ['**/*Test*', '**/generated*']
+        }
         useJUnitPlatform {
             excludeTags(integrationTagName, slowIntegrationTagName, cloudStorageTestTagName)
         }
@@ -331,6 +366,7 @@ subprojects {
     }
 
     javadoc.options.addStringOption('Xdoclint:none', '-quiet')
+    check.dependsOn 'jacocoTestCoverageVerification'
 }
 
 task('generate') {

From 570770c10ba06a3c3ed11afb2cb6e258d30a8db5 Mon Sep 17 00:00:00 2001
From: a-honcharenko <97160705+a-honcharenko@users.noreply.github.com>
Date: Wed, 11 May 2022 20:07:13 +0300
Subject: [PATCH 24/55] A honcharenko/multiarch build 801 (#12570)

* added multiarch image publishing/modified dockerfiles

* added new ami

* changed version to test

* rollback version

* check version test

* env vars temp fix

* apt-utils error fix

* disabled failed test

* remove excluded tests

* Excluded :airbyte-db:lib:test

* Excluded :airbyte-db:lib:test

* static jdk version for test

* ok test

* ok test

* ok test

* test vars

* qemu issue fix

* Returned version vars

* Scripts update

* Version vars change

* Comment fix

* Added comments, minor changes and comments in dockerfiles

* Uncomment line to push images
---
 .github/actions/start-aws-runner/action.yml | 2 +-
 airbyte-container-orchestrator/Dockerfile | 11 +++---
 airbyte-workers/Dockerfile | 9 +++--
 tools/bin/publish_docker.sh | 43 +++++++++++++++++++++
 tools/bin/release_version.sh | 6 ++-
 5 files changed, 60 insertions(+), 11 deletions(-)
 create mode 100755 tools/bin/publish_docker.sh

diff --git a/.github/actions/start-aws-runner/action.yml b/.github/actions/start-aws-runner/action.yml
index a556ecee50d1a..7f268783fc608 100644
--- a/.github/actions/start-aws-runner/action.yml
+++ b/.github/actions/start-aws-runner/action.yml
@@ -9,7 +9,7 @@ inputs:
     required: true
   ec2-image-id:
     # github-self-hosted-runner-ubuntu-20-100g-disk-with-cypress-deps
-    default: "ami-08927c058921b27f4"
+    default: "ami-0f23be2f917510c26"
     required: true
   ec2-instance-type:
     default: "c5.2xlarge"
diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile
index 22ad887ca064e..19408492b236e 100644
--- a/airbyte-container-orchestrator/Dockerfile
+++ b/airbyte-container-orchestrator/Dockerfile
@@ -12,10 +12,11 @@ RUN apt-get update && apt-get install -y \
     gnupg-agent \
     software-properties-common
 RUN curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add -
-RUN add-apt-repository \
-    "deb [arch=${DOCKER_BUILD_ARCH}] https://download.docker.com/linux/debian \
-    $(lsb_release -cs) \
-    stable"
+# arch var used to detect architecture of container. Architecture should be specified to get proper binaries from repo.
+RUN arch=$(dpkg --print-architecture) && \
+    add-apt-repository \
+    "deb [arch=${arch}] https://download.docker.com/linux/debian \
+    $(lsb_release -cs) stable"
 RUN apt-get update && apt-get install -y docker-ce-cli jq
 
 # Install kubectl for copying files to kube pods. Eventually should be replaced with a kube java client.
@@ -23,7 +24,7 @@ RUN apt-get update && apt-get install -y docker-ce-cli jq
 # The following commands were taken from https://kubernetes.io/docs/tasks/tools/install-kubectl-linux/#install-using-native-package-management
 RUN curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packages.cloud.google.com/apt/doc/apt-key.gpg
 RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list
-RUN apt-get update && apt-get install -y kubectl
+RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y kubectl
 
 # Don't change this manually. Bump version expects to make moves based on this string
 ARG VERSION=0.38.1-alpha
 ENV APPLICATION airbyte-container-orchestrator
 ENV VERSION=${VERSION}
diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile
index 12d6386770837..17e05a6163856 100644
--- a/airbyte-workers/Dockerfile
+++ b/airbyte-workers/Dockerfile
@@ -12,10 +12,11 @@ RUN apt-get update && apt-get install -y \
     gnupg-agent \
     software-properties-common
 RUN curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add -
-RUN add-apt-repository \
-    "deb [arch=${DOCKER_BUILD_ARCH}] https://download.docker.com/linux/debian \
-    $(lsb_release -cs) \
-    stable"
+# arch var used to detect architecture of container. Architecture should be specified to get proper binaries from repo.
+RUN arch=$(dpkg --print-architecture) && \
+    add-apt-repository \
+    "deb [arch=${arch}] https://download.docker.com/linux/debian \
+    $(lsb_release -cs) stable"
 RUN apt-get update && apt-get install -y docker-ce-cli jq
 
 # Install kubectl for copying files to kube pods. Eventually should be replaced with a kube java client.
diff --git a/tools/bin/publish_docker.sh b/tools/bin/publish_docker.sh
new file mode 100755
index 0000000000000..cb0950977ac0e
--- /dev/null
+++ b/tools/bin/publish_docker.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+set -e
+
+# List of directories without "airbyte-" prefix.
+projectDir=(
+  "workers"
+  "cli"
+  "webapp"
+  "server"
+  "temporal"
+  "container-orchestrator"
+  "config/init"
+  "bootloader"
+  "metrics/reporter"
+  "db/lib"
+  "scheduler/app"
+)
+
+# Set default values to required vars. If set in env, values will be taken from there.
+JDK_VERSION=${JDK_VERSION:-17.0.1}
+ALPINE_IMAGE=${ALPINE_IMAGE:-alpine:3.14}
+POSTGRES_IMAGE=${POSTGRES_IMAGE:-postgres:13-alpine}
+
+# Iterate over all directories in list to build one by one.
+# metrics-reporter is an exception due to wrong artifact naming
+for workdir in "${projectDir[@]}"
+  do
+   if [ $workdir = "metrics/reporter" ]; then
+     artifactName="metrics-reporter"
+   else
+     artifactName=${workdir%/*}
+   fi
+  docker buildx create --use --name $artifactName && \
+  docker buildx build -t "airbyte/$artifactName:$VERSION" \
+    --platform linux/amd64,linux/arm64 \
+    --build-arg VERSION=$VERSION \
+    --build-arg ALPINE_IMAGE=$ALPINE_IMAGE \
+    --build-arg POSTGRES_IMAGE=$POSTGRES_IMAGE \
+    --build-arg JDK_VERSION=$JDK_VERSION \
+    --push \
+    airbyte-$workdir/build/docker
+  docker buildx rm $artifactName
+done
diff --git a/tools/bin/release_version.sh b/tools/bin/release_version.sh
index e5e49f84768d5..eee7012f318d6 100755
--- a/tools/bin/release_version.sh
+++ b/tools/bin/release_version.sh
@@ -26,5 +26,9 @@ source ./tools/bin/bump_version.sh
 echo "Building and publishing PLATFORM version $NEW_VERSION for git revision $GIT_REVISION..."
 VERSION=$NEW_VERSION SUB_BUILD=PLATFORM ./gradlew clean build
 VERSION=$NEW_VERSION SUB_BUILD=PLATFORM ./gradlew publish
-VERSION=$NEW_VERSION GIT_REVISION=$GIT_REVISION docker-compose -f docker-compose.build.yaml push
+
+# Container should be running before build starts
+# It generates binaries to build images for different CPU architectures
+docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
+VERSION=$NEW_VERSION ./tools/bin/publish_docker.sh
 echo "Completed building and publishing PLATFORM..."

From 7b43e39219f36636183145c69b9909c1024624cd Mon Sep 17 00:00:00 2001
From: a-honcharenko <97160705+a-honcharenko@users.noreply.github.com>
Date: Wed, 11 May 2022 21:17:54 +0300
Subject: [PATCH 25/55] Added init exception to script (#12783)

---
 tools/bin/publish_docker.sh | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/tools/bin/publish_docker.sh b/tools/bin/publish_docker.sh
index cb0950977ac0e..ebbe4748e2aff 100755
--- a/tools/bin/publish_docker.sh
+++ b/tools/bin/publish_docker.sh
@@ -27,6 +27,8 @@ for workdir in "${projectDir[@]}"
   do
    if [ $workdir = "metrics/reporter" ]; then
      artifactName="metrics-reporter"
+    elif [ $workdir = "config/init" ]; then
+      artifactName="init"
    else
      artifactName=${workdir%/*}
    fi

From 225aecd37c4b5d41e9c16e3f2619a138c3067037 Mon Sep 17 00:00:00 2001
From: Serhii Lazebnyi <53845333+lazebnyi@users.noreply.github.com>
Date: Wed, 11 May 2022 21:21:54 +0300
Subject: =?UTF-8?q?=F0=9F=90=9BSource=20Amazon=20S3:=20Fixed?=
 =?UTF-8?q?=20empty=20options=20issue=20(#12730)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Fixed empty options issue

* Update airbyte-integrations/connectors/source-s3/source_s3/utils.py

Co-authored-by: Denis Davydov

* Bumped version

* Fix typo

* Bumped seed version

* Fix changelog

* Bumped version in docker file

* auto-bump connector version

Co-authored-by: Denis Davydov
Co-authored-by: Octavia Squidington III
---
 .../resources/seed/source_definitions.yaml | 2 +-
 .../src/main/resources/seed/source_specs.yaml | 2 +-
 .../connectors/source-s3/Dockerfile | 2 +-
 .../formats/csv_parser.py | 9 ++++---
 .../connectors/source-s3/source_s3/utils.py | 4 +++
 .../source-s3/unit_tests/test_csv_parser.py | 27 +++++++++++++++++++
 docs/integrations/sources/s3.md | 3 ++-
 7 files changed, 41 insertions(+), 8 deletions(-)

diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
index 7e2823e6863a7..b6c8a7f28c06b 100644
--- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
@@ -779,7 +779,7 @@
 - name: S3
   sourceDefinitionId: 69589781-7828-43c5-9f63-8925b1c1ccc2
   dockerRepository: airbyte/source-s3
-  dockerImageTag: 0.1.12
+  dockerImageTag: 0.1.13
   documentationUrl: https://docs.airbyte.io/integrations/sources/s3
   icon: s3.svg
   sourceType: file
diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml
index c1adfc02f6e3d..82cf9c9a1420a 100644
--- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml
+++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml
@@ -7144,7 +7144,7 @@
         path_in_connector_config:
           - "credentials"
           - "client_secret"
-- dockerImage: "airbyte/source-s3:0.1.12"
+- dockerImage: "airbyte/source-s3:0.1.13"
   spec:
     documentationUrl: "https://docs.airbyte.io/integrations/sources/s3"
     changelogUrl:
"https://docs.airbyte.io/integrations/sources/s3" diff --git a/airbyte-integrations/connectors/source-s3/Dockerfile b/airbyte-integrations/connectors/source-s3/Dockerfile index aed9ea734e425..003cef069e1ac 100644 --- a/airbyte-integrations/connectors/source-s3/Dockerfile +++ b/airbyte-integrations/connectors/source-s3/Dockerfile @@ -17,5 +17,5 @@ COPY source_s3 ./source_s3 ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.12 +LABEL io.airbyte.version=0.1.13 LABEL io.airbyte.name=airbyte/source-s3 diff --git a/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/formats/csv_parser.py b/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/formats/csv_parser.py index 1b98827553136..112b2cd781a2c 100644 --- a/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/formats/csv_parser.py +++ b/airbyte-integrations/connectors/source-s3/source_s3/source_files_abstract/formats/csv_parser.py @@ -11,7 +11,7 @@ import pyarrow as pa import six # type: ignore[import] from pyarrow import csv as pa_csv -from source_s3.utils import run_in_external_process +from source_s3.utils import get_value_or_json_if_empty_string, run_in_external_process from .abstract_file_parser import AbstractFileParser from .csv_spec import CsvFormat @@ -40,9 +40,10 @@ def _read_options(self) -> Mapping[str, str]: https://arrow.apache.org/docs/python/generated/pyarrow.csv.ReadOptions.html build ReadOptions object like: pa.csv.ReadOptions(**self._read_options()) """ + advanced_options = get_value_or_json_if_empty_string(self.format.advanced_options) return { **{"block_size": self.format.block_size, "encoding": self.format.encoding}, - **json.loads(self.format.advanced_options), + **json.loads(advanced_options), } def _parse_options(self) -> Mapping[str, str]: @@ -66,11 +67,11 @@ def _convert_options(self, json_schema: Mapping[str, Any] = None) -> Mapping[str :param json_schema: if this is passed in, pyarrow will attempt to enforce this schema on read, defaults to None """ check_utf8 = self.format.encoding.lower().replace("-", "") == "utf8" - + additional_reader_options = get_value_or_json_if_empty_string(self.format.additional_reader_options) convert_schema = self.json_schema_to_pyarrow_schema(json_schema) if json_schema is not None else None return { **{"check_utf8": check_utf8, "column_types": convert_schema}, - **json.loads(self.format.additional_reader_options), + **json.loads(additional_reader_options), } def get_inferred_schema(self, file: Union[TextIO, BinaryIO]) -> Mapping[str, Any]: diff --git a/airbyte-integrations/connectors/source-s3/source_s3/utils.py b/airbyte-integrations/connectors/source-s3/source_s3/utils.py index a9e5668ba4e43..ddfec7aa469a4 100644 --- a/airbyte-integrations/connectors/source-s3/source_s3/utils.py +++ b/airbyte-integrations/connectors/source-s3/source_s3/utils.py @@ -48,3 +48,7 @@ def run_in_external_process(fn: Callable, timeout: int, max_timeout: int, logger def multiprocess_queuer(func: Callable, queue: mp.Queue, *args: Any, **kwargs: Any) -> None: """this is our multiprocesser helper function, lives at top-level to be Windows-compatible""" queue.put(dill.loads(func)(*args, **kwargs)) + + +def get_value_or_json_if_empty_string(options: str) -> str: + return options.strip() or "{}" diff --git a/airbyte-integrations/connectors/source-s3/unit_tests/test_csv_parser.py 
b/airbyte-integrations/connectors/source-s3/unit_tests/test_csv_parser.py index f647e5f295f7f..612dd86ec3a15 100644 --- a/airbyte-integrations/connectors/source-s3/unit_tests/test_csv_parser.py +++ b/airbyte-integrations/connectors/source-s3/unit_tests/test_csv_parser.py @@ -331,6 +331,33 @@ def cases(cls) -> Mapping[str, Any]: "line_checks": {}, "fails": ["test_get_inferred_schema", "test_stream_records"], }, + "empty_advanced_options": { + "AbstractFileParser": CsvParser( + format={"filetype": "csv", "advanced_options": ""}, + master_schema={ + "id": "integer", + "name": "string", + "valid": "boolean", + "code": "integer", + "degrees": "number", + "birthday": "string", + "last_seen": "string", + }, + ), + "filepath": os.path.join(SAMPLE_DIRECTORY, "csv/test_file_1.csv"), + "num_records": 8, + "inferred_schema": { + "id": "integer", + "name": "string", + "valid": "boolean", + "code": "integer", + "degrees": "number", + "birthday": "string", + "last_seen": "string", + }, + "line_checks": {}, + "fails": [], + }, "no_header_csv_file": { # no header test "AbstractFileParser": CsvParser( diff --git a/docs/integrations/sources/s3.md b/docs/integrations/sources/s3.md index 8e766f2d8c632..cb994c1d7bd91 100644 --- a/docs/integrations/sources/s3.md +++ b/docs/integrations/sources/s3.md @@ -196,8 +196,9 @@ The avro parser uses [fastavro](https://fastavro.readthedocs.io/en/latest/). Cur | Version | Date | Pull Request | Subject | |:--------| :--- | :--- | :--- | +| 0.1.13 | 2022-05-11 | [12730](https://github.com/airbytehq/airbyte/pull/12730) | Fixed empty options issue | | 0.1.12 | 2022-05-11 | [12602](https://github.com/airbytehq/airbyte/pull/12602) | Added support for Avro file format | -| 0.1.11 | 2022-04-30 | [12500](https://github.com/airbytehq/airbyte/pull/12500) | Improve input configuration copy | +| 0.1.11 | 2022-04-30 | [12500](https://github.com/airbytehq/airbyte/pull/12500) | Improve input configuration copy | | 0.1.10 | 2022-01-28 | [8252](https://github.com/airbytehq/airbyte/pull/8252) | Refactoring of files' metadata | | 0.1.9 | 2022-01-06 | [9163](https://github.com/airbytehq/airbyte/pull/9163) | Work-around for web-UI, `backslash - t` converts to `tab` for `format.delimiter` field. 
|
| 0.1.7 | 2021-11-08 | [7499](https://github.com/airbytehq/airbyte/pull/7499) | Remove base-python dependencies |

From f654917335e21da97873729e2fd6f8e3ca2d5222 Mon Sep 17 00:00:00 2001
From: Serhii Chvaliuk
Date: Wed, 11 May 2022 22:00:18 +0300
Subject: [PATCH 27/55] switch amazon-ads to beta (#12688)

Signed-off-by: Sergey Chvalyuk
---
 .../init/src/main/resources/seed/source_definitions.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
index b6c8a7f28c06b..381cae9d39de6 100644
--- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
@@ -21,7 +21,7 @@
   documentationUrl: https://docs.airbyte.io/integrations/sources/amazon-ads
   icon: amazonads.svg
   sourceType: api
-  releaseStage: alpha
+  releaseStage: beta
 - name: Amazon Seller Partner
   sourceDefinitionId: e55879a8-0ef8-4557-abcf-ab34c53ec460
   dockerRepository: airbyte/source-amazon-seller-partner

From 852528533d106ed133e50ccf0cc30d42e7640c58 Mon Sep 17 00:00:00 2001
From: a-honcharenko <97160705+a-honcharenko@users.noreply.github.com>
Date: Wed, 11 May 2022 22:20:27 +0300
Subject: [PATCH 28/55] Fix naming in script (#12786)

* Artifact naming fix

* Indent properly
---
 tools/bin/publish_docker.sh | 25 ++++++++++++++++++-------
 1 file changed, 18 insertions(+), 7 deletions(-)

diff --git a/tools/bin/publish_docker.sh b/tools/bin/publish_docker.sh
index ebbe4748e2aff..f46c76a7c42e0 100755
--- a/tools/bin/publish_docker.sh
+++ b/tools/bin/publish_docker.sh
@@ -25,13 +25,24 @@ POSTGRES_IMAGE=${POSTGRES_IMAGE:-postgres:13-alpine}
 # metrics-reporter is an exception due to wrong artifact naming
 for workdir in "${projectDir[@]}"
   do
-   if [ $workdir = "metrics/reporter" ]; then
-     artifactName="metrics-reporter"
-   elif [ $workdir = "config/init" ]; then
-     artifactName="init"
-   else
-     artifactName=${workdir%/*}
-   fi
+  case $workdir in
+    "metrics/reporter")
+      artifactName="metrics-reporter"
+      ;;
+
+    "config/init")
+      artifactName="init"
+      ;;
+
+    "workers")
+      artifactName="worker"
+      ;;
+
+    *)
+      artifactName=${workdir%/*}
+      ;;
+  esac
+
  docker buildx create --use --name $artifactName && \
  docker buildx build -t "airbyte/$artifactName:$VERSION" \
    --platform linux/amd64,linux/arm64 \

From ffd4a01f3e02a2adb6f16214eed784907ca98c33 Mon Sep 17 00:00:00 2001
From: Octavia Squidington III <90398440+octavia-squidington-iii@users.noreply.github.com>
Date: Wed, 11 May 2022 16:50:26 -0300
Subject: [PATCH 29/55] Bump Airbyte version from 0.38.1-alpha to 0.38.2-alpha
 (#12789)

Co-authored-by: a-honcharenko
---
 .bumpversion.cfg | 2 +-
 .env | 2 +-
 airbyte-bootloader/Dockerfile | 2 +-
 airbyte-container-orchestrator/Dockerfile | 2 +-
 airbyte-metrics/reporter/Dockerfile | 2 +-
 airbyte-scheduler/app/Dockerfile | 2 +-
 airbyte-server/Dockerfile | 2 +-
 airbyte-webapp/package-lock.json | 4 ++--
 airbyte-webapp/package.json | 2 +-
 airbyte-workers/Dockerfile | 2 +-
 charts/airbyte/Chart.yaml | 2 +-
 charts/airbyte/README.md | 10 +++++-----
 charts/airbyte/values.yaml | 10 +++++-----
 docs/operator-guides/upgrading-airbyte.md | 2 +-
 kube/overlays/stable-with-resource-limits/.env | 2 +-
 .../stable-with-resource-limits/kustomization.yaml | 12 ++++++------
 kube/overlays/stable/.env | 2 +-
 kube/overlays/stable/kustomization.yaml | 12 ++++++------
 octavia-cli/Dockerfile | 2 +-
 octavia-cli/README.md | 2 +-
 octavia-cli/install.sh | 2 +-
 octavia-cli/setup.py | 
2 +- 22 files changed, 41 insertions(+), 41 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 6d106cc3f50f7..47ae7b955ee14 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.38.1-alpha +current_version = 0.38.2-alpha commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? diff --git a/.env b/.env index 311f60ef10a9d..a24e7001a6d25 100644 --- a/.env +++ b/.env @@ -10,7 +10,7 @@ ### SHARED ### -VERSION=0.38.1-alpha +VERSION=0.38.2-alpha # When using the airbyte-db via default docker image CONFIG_ROOT=/data diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index f272323d3ff8f..f4d862fdb6f01 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim -ARG VERSION=0.38.1-alpha +ARG VERSION=0.38.2-alpha ENV APPLICATION airbyte-bootloader ENV VERSION ${VERSION} diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index 19408492b236e..20ca9ac800af7 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -27,7 +27,7 @@ RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] htt RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y kubectl # Don't change this manually. Bump version expects to make moves based on this string -ARG VERSION=0.38.1-alpha +ARG VERSION=0.38.2-alpha ENV APPLICATION airbyte-container-orchestrator ENV VERSION=${VERSION} diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index 99c710f56792d..6b508f3ab802d 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS metrics-reporter -ARG VERSION=0.38.1-alpha +ARG VERSION=0.38.2-alpha ENV APPLICATION airbyte-metrics-reporter ENV VERSION ${VERSION} diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile index e5dcb4bd5e622..317910cf6a5e6 100644 --- a/airbyte-scheduler/app/Dockerfile +++ b/airbyte-scheduler/app/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS scheduler -ARG VERSION=0.38.1-alpha +ARG VERSION=0.38.2-alpha ENV APPLICATION airbyte-scheduler ENV VERSION ${VERSION} diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index f4f4fb3e3e33b..0c8533002e1d7 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -3,7 +3,7 @@ FROM openjdk:${JDK_VERSION}-slim AS server EXPOSE 8000 -ARG VERSION=0.38.1-alpha +ARG VERSION=0.38.2-alpha ENV APPLICATION airbyte-server ENV VERSION ${VERSION} diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index 74f4ec270e530..3bd4a4ffcfb57 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.38.1-alpha", + "version": "0.38.2-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.38.1-alpha", + "version": "0.38.2-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^6.1.1", "@fortawesome/free-brands-svg-icons": "^6.1.1", diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index cce629bc10185..d87295dc6ec73 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - 
"version": "0.38.1-alpha", + "version": "0.38.2-alpha", "private": true, "engines": { "node": ">=16.0.0" diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile index 17e05a6163856..dd0908c01d856 100644 --- a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -26,7 +26,7 @@ RUN curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packa RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list RUN apt-get update && apt-get install -y kubectl -ARG VERSION=0.38.1-alpha +ARG VERSION=0.38.2-alpha ENV APPLICATION airbyte-workers ENV VERSION ${VERSION} diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index 6f2e30de853de..7ef9db404ad47 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -21,7 +21,7 @@ version: 0.3.2 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "0.38.1-alpha" +appVersion: "0.38.2-alpha" dependencies: - name: common diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index 709ac8068d334..284c5b85e9452 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -30,7 +30,7 @@ Helm charts for Airbyte. | `webapp.replicaCount` | Number of webapp replicas | `1` | | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` | | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` | -| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.38.1-alpha` | +| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.38.2-alpha` | | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` | | `webapp.containerSecurityContext` | Security context for the container | `{}` | | `webapp.livenessProbe.enabled` | Enable livenessProbe on the webapp | `true` | @@ -72,7 +72,7 @@ Helm charts for Airbyte. | `scheduler.replicaCount` | Number of scheduler replicas | `1` | | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` | | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | -| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.38.1-alpha` | +| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.38.2-alpha` | | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | | `scheduler.containerSecurityContext` | Security context for the container | `{}` | | `scheduler.livenessProbe.enabled` | Enable livenessProbe on the scheduler | `true` | @@ -135,7 +135,7 @@ Helm charts for Airbyte. | `server.replicaCount` | Number of server replicas | `1` | | `server.image.repository` | The repository to use for the airbyte server image. | `airbyte/server` | | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` | -| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.38.1-alpha` | +| `server.image.tag` | The airbyte server image tag. 
Defaults to the chart's AppVersion | `0.38.2-alpha` | | `server.podAnnotations` | Add extra annotations to the server pod | `{}` | | `server.containerSecurityContext` | Security context for the container | `{}` | | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` | @@ -170,7 +170,7 @@ Helm charts for Airbyte. | `worker.replicaCount` | Number of worker replicas | `1` | | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` | | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` | -| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.38.1-alpha` | +| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.38.2-alpha` | | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` | | `worker.containerSecurityContext` | Security context for the container | `{}` | | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` | @@ -202,7 +202,7 @@ Helm charts for Airbyte. | ----------------------------- | -------------------------------------------------------------------- | -------------------- | | `bootloader.image.repository` | The repository to use for the airbyte bootloader image. | `airbyte/bootloader` | | `bootloader.image.pullPolicy` | the pull policy to use for the airbyte bootloader image | `IfNotPresent` | -| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.38.1-alpha` | +| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.38.2-alpha` | | `bootloader.podAnnotations` | Add extra annotations to the bootloader pod | `{}` | | `bootloader.nodeSelector` | Node labels for pod assignment | `{}` | | `bootloader.tolerations` | Tolerations for worker pod assignment. | `[]` | diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index 5ded7bfc76cf7..4f65af52cc6b9 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -41,7 +41,7 @@ webapp: image: repository: airbyte/webapp pullPolicy: IfNotPresent - tag: 0.38.1-alpha + tag: 0.38.2-alpha ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s) ## @@ -207,7 +207,7 @@ scheduler: image: repository: airbyte/scheduler pullPolicy: IfNotPresent - tag: 0.38.1-alpha + tag: 0.38.2-alpha ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod ## @@ -438,7 +438,7 @@ server: image: repository: airbyte/server pullPolicy: IfNotPresent - tag: 0.38.1-alpha + tag: 0.38.2-alpha ## @param server.podAnnotations [object] Add extra annotations to the server pod ## @@ -565,7 +565,7 @@ worker: image: repository: airbyte/worker pullPolicy: IfNotPresent - tag: 0.38.1-alpha + tag: 0.38.2-alpha ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s) ## @@ -683,7 +683,7 @@ bootloader: image: repository: airbyte/bootloader pullPolicy: IfNotPresent - tag: 0.38.1-alpha + tag: 0.38.2-alpha ## @param bootloader.podAnnotations [object] Add extra annotations to the bootloader pod ## diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md index 9371aef650a9c..bd9ed4ce925d9 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/operator-guides/upgrading-airbyte.md @@ -103,7 +103,7 @@ If you are upgrading from (i.e. 
your current version of Airbyte is) Airbyte vers Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`. ```bash - docker run --rm -v /tmp:/config airbyte/migration:0.38.1-alpha --\ + docker run --rm -v /tmp:/config airbyte/migration:0.38.2-alpha --\ --input /config/airbyte_archive.tar.gz\ --output /config/airbyte_archive_migrated.tar.gz ``` diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env index b9c1a6e7ab7e5..a706a1276a946 100644 --- a/kube/overlays/stable-with-resource-limits/.env +++ b/kube/overlays/stable-with-resource-limits/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.38.1-alpha +AIRBYTE_VERSION=0.38.2-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml index 7fb89e975407b..23045a8ddfc91 100644 --- a/kube/overlays/stable-with-resource-limits/kustomization.yaml +++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.38.1-alpha + newTag: 0.38.2-alpha - name: airbyte/bootloader - newTag: 0.38.1-alpha + newTag: 0.38.2-alpha - name: airbyte/scheduler - newTag: 0.38.1-alpha + newTag: 0.38.2-alpha - name: airbyte/server - newTag: 0.38.1-alpha + newTag: 0.38.2-alpha - name: airbyte/webapp - newTag: 0.38.1-alpha + newTag: 0.38.2-alpha - name: airbyte/worker - newTag: 0.38.1-alpha + newTag: 0.38.2-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env index e277a9be5bee9..55aea55ddafd7 100644 --- a/kube/overlays/stable/.env +++ b/kube/overlays/stable/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.38.1-alpha +AIRBYTE_VERSION=0.38.2-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml index cb4cb510b7d48..ac105343e6c99 100644 --- a/kube/overlays/stable/kustomization.yaml +++ b/kube/overlays/stable/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.38.1-alpha + newTag: 0.38.2-alpha - name: airbyte/bootloader - newTag: 0.38.1-alpha + newTag: 0.38.2-alpha - name: airbyte/scheduler - newTag: 0.38.1-alpha + newTag: 0.38.2-alpha - name: airbyte/server - newTag: 0.38.1-alpha + newTag: 0.38.2-alpha - name: airbyte/webapp - newTag: 0.38.1-alpha + newTag: 0.38.2-alpha - name: airbyte/worker - newTag: 0.38.1-alpha + newTag: 0.38.2-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/octavia-cli/Dockerfile b/octavia-cli/Dockerfile index d025d9304e725..82139b526e988 100644 --- a/octavia-cli/Dockerfile +++ b/octavia-cli/Dockerfile @@ -14,5 +14,5 @@ USER octavia-cli WORKDIR /home/octavia-project ENTRYPOINT ["octavia"] -LABEL io.airbyte.version=0.38.1-alpha +LABEL io.airbyte.version=0.38.2-alpha LABEL io.airbyte.name=airbyte/octavia-cli diff --git a/octavia-cli/README.md b/octavia-cli/README.md index 74b9a04d8b530..9f63b84b83702 100644 --- a/octavia-cli/README.md +++ b/octavia-cli/README.md @@ -105,7 +105,7 @@ This script: ```bash touch ~/.octavia # Create a file to store env variables that will be mapped the octavia-cli container mkdir my_octavia_project_directory # Create your octavia project directory where YAML 
configurations will be stored. -docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.38.1-alpha +docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.38.2-alpha ``` ### Using `docker-compose` diff --git a/octavia-cli/install.sh b/octavia-cli/install.sh index 5f94727505cf6..130a21afdb1e4 100755 --- a/octavia-cli/install.sh +++ b/octavia-cli/install.sh @@ -3,7 +3,7 @@ # This install scripts currently only works for ZSH and Bash profiles. # It creates an octavia alias in your profile bound to a docker run command and your current user. -VERSION=0.38.1-alpha +VERSION=0.38.2-alpha OCTAVIA_ENV_FILE=${HOME}/.octavia detect_profile() { diff --git a/octavia-cli/setup.py b/octavia-cli/setup.py index a8fab03974232..8001e37c327be 100644 --- a/octavia-cli/setup.py +++ b/octavia-cli/setup.py @@ -15,7 +15,7 @@ setup( name="octavia-cli", - version="0.38.1", + version="0.38.2", description="A command line interface to manage Airbyte configurations", long_description=README, author="Airbyte", From 9126ebc4c32c205362b6617d8bcb241702c1a680 Mon Sep 17 00:00:00 2001 From: "Pedro S. Lopez" Date: Wed, 11 May 2022 16:05:14 -0400 Subject: [PATCH 30/55] SAT: add `threshold_days` incremental test option (#12715) * SAT: add `threshold_days` incremental test option * fix: support cursor values that are already dates * dont use constant value * update docs * use pendulum for date parsing * bump cdk version * use pendulum for duration * add support for unix timestamps * bump version, update changelog --- .../bases/source-acceptance-test/CHANGELOG.md | 4 + .../bases/source-acceptance-test/Dockerfile | 2 +- .../bases/source-acceptance-test/setup.py | 2 +- .../source_acceptance_test/config.py | 5 + .../tests/test_incremental.py | 44 ++++++- .../unit_tests/test_incremental.py | 112 ++++++++++++++++++ .../source-acceptance-tests-reference.md | 13 +- 7 files changed, 169 insertions(+), 13 deletions(-) create mode 100644 airbyte-integrations/bases/source-acceptance-test/unit_tests/test_incremental.py diff --git a/airbyte-integrations/bases/source-acceptance-test/CHANGELOG.md b/airbyte-integrations/bases/source-acceptance-test/CHANGELOG.md index 9301236d90ed6..b6fb971fe5bc8 100644 --- a/airbyte-integrations/bases/source-acceptance-test/CHANGELOG.md +++ b/airbyte-integrations/bases/source-acceptance-test/CHANGELOG.md @@ -1,5 +1,9 @@ # Changelog +## 0.1.51 +- Add `threshold_days` option for lookback window support in incremental tests. +- Update CDK to prevent warnings when encountering new `AirbyteTraceMessage`s. + ## 0.1.50 Added support for passing a `.yaml` file as `spec_path`. diff --git a/airbyte-integrations/bases/source-acceptance-test/Dockerfile b/airbyte-integrations/bases/source-acceptance-test/Dockerfile index 8ff75150da7f1..3351bd6eab6de 100644 --- a/airbyte-integrations/bases/source-acceptance-test/Dockerfile +++ b/airbyte-integrations/bases/source-acceptance-test/Dockerfile @@ -33,7 +33,7 @@ COPY pytest.ini setup.py ./ COPY source_acceptance_test ./source_acceptance_test RUN pip install . 
-LABEL io.airbyte.version=0.1.50 +LABEL io.airbyte.version=0.1.51 LABEL io.airbyte.name=airbyte/source-acceptance-test ENTRYPOINT ["python", "-m", "pytest", "-p", "source_acceptance_test.plugin", "-r", "fEsx"] diff --git a/airbyte-integrations/bases/source-acceptance-test/setup.py b/airbyte-integrations/bases/source-acceptance-test/setup.py index 1b487e137730a..28b53bf2dd053 100644 --- a/airbyte-integrations/bases/source-acceptance-test/setup.py +++ b/airbyte-integrations/bases/source-acceptance-test/setup.py @@ -6,7 +6,7 @@ import setuptools MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.1.25", + "airbyte-cdk~=0.1.56", "docker~=5.0.3", "PyYAML~=5.4", "icdiff~=1.9", diff --git a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/config.py b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/config.py index 6051aa81160b5..44c9e3d133655 100644 --- a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/config.py +++ b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/config.py @@ -105,6 +105,11 @@ class IncrementalConfig(BaseConfig): ) future_state_path: Optional[str] = Field(description="Path to a state file with values in far future") timeout_seconds: int = timeout_seconds + threshold_days: int = Field( + description="Allow records to be emitted with a cursor value this number of days before the state cursor", + default=0, + ge=0, + ) class TestConfig(BaseConfig): diff --git a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_incremental.py b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_incremental.py index dc9db26749c95..34b295991a6ba 100644 --- a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_incremental.py +++ b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_incremental.py @@ -2,15 +2,17 @@ # Copyright (c) 2021 Airbyte, Inc., all rights reserved. # - import json +from datetime import datetime from pathlib import Path from typing import Any, Iterable, Mapping, Tuple +import pendulum import pytest from airbyte_cdk.models import ConfiguredAirbyteCatalog, Type from source_acceptance_test import BaseTest -from source_acceptance_test.utils import ConnectorRunner, JsonSchemaHelper, filter_output, incremental_only_catalog +from source_acceptance_test.config import IncrementalConfig +from source_acceptance_test.utils import ConnectorRunner, JsonSchemaHelper, SecretDict, filter_output, incremental_only_catalog @pytest.fixture(name="future_state_path") @@ -76,9 +78,41 @@ def records_with_state(records, state, stream_mapping, state_cursor_paths) -> It yield record_value, state_value, stream_name +def compare_cursor_with_threshold(record_value, state_value, threshold_days: int) -> bool: + """ + Checks if the record's cursor value is older or equal to the state cursor value. + + If the threshold_days option is set, the values will be converted to dates so that the time-based offset can be applied. + :raises: pendulum.parsing.exceptions.ParserError: if threshold_days is passed with non-date cursor values. 
+ """ + if threshold_days: + + def _parse_date_value(value) -> datetime: + if isinstance(value, datetime): + return value + if isinstance(value, (int, float)): + return pendulum.from_timestamp(value / 1000) + return pendulum.parse(value) + + record_date_value = _parse_date_value(record_value) + state_date_value = _parse_date_value(state_value) + + return record_date_value >= (state_date_value - pendulum.duration(days=threshold_days)) + + return record_value >= state_value + + @pytest.mark.default_timeout(20 * 60) class TestIncremental(BaseTest): - def test_two_sequential_reads(self, connector_config, configured_catalog_for_incremental, cursor_paths, docker_runner: ConnectorRunner): + def test_two_sequential_reads( + self, + inputs: IncrementalConfig, + connector_config: SecretDict, + configured_catalog_for_incremental: ConfiguredAirbyteCatalog, + cursor_paths: dict[str, list[str]], + docker_runner: ConnectorRunner, + ): + threshold_days = getattr(inputs, "threshold_days") or 0 stream_mapping = {stream.stream.name: stream for stream in configured_catalog_for_incremental.streams} output = docker_runner.call_read(connector_config, configured_catalog_for_incremental) @@ -98,8 +132,8 @@ def test_two_sequential_reads(self, connector_config, configured_catalog_for_inc records_2 = filter_output(output, type_=Type.RECORD) for record_value, state_value, stream_name in records_with_state(records_2, latest_state, stream_mapping, cursor_paths): - assert ( - record_value >= state_value + assert compare_cursor_with_threshold( + record_value, state_value, threshold_days ), f"Second incremental sync should produce records older or equal to cursor value from the state. Stream: {stream_name}" def test_state_with_abnormally_large_values(self, connector_config, configured_catalog, future_state, docker_runner: ConnectorRunner): diff --git a/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_incremental.py b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_incremental.py new file mode 100644 index 0000000000000..150addea0f3a3 --- /dev/null +++ b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_incremental.py @@ -0,0 +1,112 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + +from datetime import datetime +from unittest.mock import MagicMock + +import pendulum +import pytest +from airbyte_cdk.models import ( + AirbyteMessage, + AirbyteRecordMessage, + AirbyteStateMessage, + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + Type, +) +from source_acceptance_test.config import IncrementalConfig +from source_acceptance_test.tests.test_incremental import TestIncremental as _TestIncremental +from source_acceptance_test.tests.test_incremental import compare_cursor_with_threshold + + +def build_messages_from_record_data(records: list[dict]) -> list[AirbyteMessage]: + return [ + AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="test_stream", data=data, emitted_at=111)) for data in records + ] + + +def build_state_message(state: dict) -> AirbyteMessage: + return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data=state)) + + +@pytest.mark.parametrize( + "record_value, state_value, threshold_days, expected_result", + [ + (datetime(2020, 10, 10), datetime(2020, 10, 9), 0, True), + (datetime(2020, 10, 10), datetime(2020, 10, 11), 0, False), + (datetime(2020, 10, 10), datetime(2020, 10, 11), 1, True), + (pendulum.parse("2020-10-10"), pendulum.parse("2020-10-09"), 0, True), + (pendulum.parse("2020-10-10"), pendulum.parse("2020-10-11"), 0, False), + (pendulum.parse("2020-10-10"), pendulum.parse("2020-10-11"), 1, True), + ("2020-10-10", "2020-10-09", 0, True), + ("2020-10-10", "2020-10-11", 0, False), + ("2020-10-10", "2020-10-11", 1, True), + (1602288000000, 1602201600000, 0, True), + (1602288000000, 1602374400000, 0, False), + (1602288000000, 1602374400000, 1, True), + (1602288000, 1602201600, 0, True), + (1602288000, 1602374400, 0, False), + (1602288000, 1602374400, 1, True), + ("aaa", "bbb", 0, False), + ("bbb", "aaa", 0, True), + ], +) +def test_compare_cursor_with_threshold(record_value, state_value, threshold_days, expected_result): + assert compare_cursor_with_threshold(record_value, state_value, threshold_days) == expected_result + + +@pytest.mark.parametrize("cursor_type", ["date", "string"]) +@pytest.mark.parametrize( + "records1, records2, latest_state, threshold_days, expected_error", + [ + ([{"date": "2020-01-01"}, {"date": "2020-01-02"}], [], "2020-01-02", 0, None), + ([{"date": "2020-01-02"}, {"date": "2020-01-03"}], [], "2020-01-02", 0, "First incremental sync should produce records younger"), + ([{"date": "2020-01-01"}, {"date": "2020-01-02"}], [{"date": "2020-01-02"}, {"date": "2020-01-03"}], "2020-01-02", 0, None), + ([{"date": "2020-01-01"}], [{"date": "2020-01-01"}], "2020-01-02", 0, "Second incremental sync should produce records older"), + ([{"date": "2020-01-01"}, {"date": "2020-01-02"}], [{"date": "2020-01-01"}, {"date": "2020-01-02"}], "2020-01-03", 2, None), + ([{"date": "2020-01-02"}, {"date": "2020-01-03"}], [], "2020-01-02", 2, "First incremental sync should produce records younger"), + ([{"date": "2020-01-01"}], [{"date": "2020-01-02"}], "2020-01-06", 3, "Second incremental sync should produce records older"), + ], +) +def test_incremental_two_sequential_reads(records1, records2, latest_state, threshold_days, cursor_type, expected_error): + input_config = IncrementalConfig(threshold_days=threshold_days) + cursor_paths = {"test_stream": ["date"]} + catalog = ConfiguredAirbyteCatalog( + streams=[ + ConfiguredAirbyteStream( + stream=AirbyteStream( + name="test_stream", + json_schema={"type": "object", "properties": {"date": {"type": cursor_type}}}, + supported_sync_modes=["full_refresh", 
"incremental"], + ), + sync_mode="incremental", + destination_sync_mode="overwrite", + cursor_field=["date"], + ) + ] + ) + + docker_runner_mock = MagicMock() + docker_runner_mock.call_read.return_value = [*build_messages_from_record_data(records1), build_state_message({"date": latest_state})] + docker_runner_mock.call_read_with_state.return_value = build_messages_from_record_data(records2) + + t = _TestIncremental() + if expected_error: + with pytest.raises(AssertionError, match=expected_error): + t.test_two_sequential_reads( + inputs=input_config, + connector_config=MagicMock(), + configured_catalog_for_incremental=catalog, + cursor_paths=cursor_paths, + docker_runner=docker_runner_mock, + ) + else: + t.test_two_sequential_reads( + inputs=input_config, + connector_config=MagicMock(), + configured_catalog_for_incremental=catalog, + cursor_paths=cursor_paths, + docker_runner=docker_runner_mock, + ) diff --git a/docs/connector-development/testing-connectors/source-acceptance-tests-reference.md b/docs/connector-development/testing-connectors/source-acceptance-tests-reference.md index ca63ca12844d4..1454868754e8b 100644 --- a/docs/connector-development/testing-connectors/source-acceptance-tests-reference.md +++ b/docs/connector-development/testing-connectors/source-acceptance-tests-reference.md @@ -185,12 +185,13 @@ This test performs two read operations on all streams which support full refresh This test verifies that all streams in the input catalog which support incremental sync can do so correctly. It does this by running two read operations: the first takes the configured catalog and config provided to this test as input. It then verifies that the sync produced a non-zero number of `RECORD` and `STATE` messages. The second read takes the same catalog and config used in the first test, plus the last `STATE` message output by the first read operation as the input state file. It verifies that either no records are produced \(since we read all records in the first sync\) or all records that produced have cursor value greater or equal to cursor value from `STATE` message. This test is performed only for streams that support incremental. Streams that do not support incremental sync are ignored. If no streams in the input catalog support incremental sync, this test is skipped. -| Input | Type | Default | Note | -| :--- | :--- | :--- | :--- | -| `config_path` | string | `secrets/config.json` | Path to a JSON object representing a valid connector configuration | -| `configured_catalog_path` | string | `integration_tests/configured_catalog.json` | Path to configured catalog | -| `cursor_paths` | dict | {} | For each stream, the path of its cursor field in the output state messages. If omitted the path will be taken from the last piece of path from stream cursor\_field. | -| `timeout_seconds` | int | 20\*60 | Test execution timeout in seconds | +| Input | Type | Default | Note | +|:--------------------------|:-------|:--------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `config_path` | string | `secrets/config.json` | Path to a JSON object representing a valid connector configuration | +| `configured_catalog_path` | string | `integration_tests/configured_catalog.json` | Path to configured catalog | +| `cursor_paths` | dict | {} | For each stream, the path of its cursor field in the output state messages. 
If omitted the path will be taken from the last piece of path from stream cursor\_field. | +| `timeout_seconds` | int | 20\*60 | Test execution timeout in seconds | +| `threshold_days` | int | 0 | For date-based cursors, allow records to be emitted with a cursor value this number of days before the state value. | ### TestStateWithAbnormallyLargeValues From 1a999b7191016bbc21806884a5c1b185e6169e2f Mon Sep 17 00:00:00 2001 From: Jonathan Pearlin Date: Wed, 11 May 2022 16:12:49 -0400 Subject: [PATCH 31/55] Close underlying connections during migration (#12710) --- .../db/instance/FlywayMigrationDatabase.java | 21 +++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/FlywayMigrationDatabase.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/FlywayMigrationDatabase.java index 11d529eff2522..94e23762baea0 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/FlywayMigrationDatabase.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/FlywayMigrationDatabase.java @@ -8,6 +8,7 @@ import io.airbyte.db.factory.DSLContextFactory; import io.airbyte.db.factory.DataSourceFactory; import io.airbyte.db.factory.FlywayFactory; +import java.io.Closeable; import java.io.IOException; import java.sql.Connection; import javax.sql.DataSource; @@ -18,6 +19,8 @@ import org.jooq.meta.postgres.PostgresDatabase; import org.jooq.tools.StringUtils; import org.jooq.tools.jdbc.JDBCUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.testcontainers.containers.PostgreSQLContainer; /** @@ -33,10 +36,16 @@ */ public abstract class FlywayMigrationDatabase extends PostgresDatabase { + private static final Logger LOGGER = LoggerFactory.getLogger(FlywayMigrationDatabase.class); + private static final String DEFAULT_DOCKER_IMAGE = "postgres:13-alpine"; private Connection connection; + private DataSource dataSource; + + private DSLContext dslContext; + protected abstract Database getAndInitializeDatabase(DSLContext dslContext) throws IOException; protected abstract DatabaseMigrator getDatabaseMigrator(Database database, Flyway flyway); @@ -75,9 +84,9 @@ private void createInternalConnection() throws Exception { .withPassword("jooq_generator"); container.start(); - final DataSource dataSource = + dataSource = DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); - final DSLContext dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); + dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); final Flyway flyway = FlywayFactory.create(dataSource, getInstalledBy(), getDbIdentifier(), getMigrationFileLocations()); final Database database = getAndInitializeDatabase(dslContext); final DatabaseMigrator migrator = getDatabaseMigrator(database, flyway); @@ -91,6 +100,14 @@ private void createInternalConnection() throws Exception { public void close() { JDBCUtils.safeClose(connection); connection = null; + dslContext.close(); + if (dataSource instanceof Closeable closeable) { + try { + closeable.close(); + } catch (final IOException e) { + LOGGER.warn("Unable to close data source.", e); + } + } super.close(); } From feb0d2f37803929a1ad0c723eea430f8cd6c201f Mon Sep 17 00:00:00 2001 From: Baz Date: Thu, 12 May 2022 01:10:28 +0300 Subject: [PATCH 32/55] =?UTF-8?q?=F0=9F=8E=89=20New=20Destination:=20Imple?= =?UTF-8?q?ment=20`Destination=20Google=20Sheets`=20using=20CDK=20(#12135)?= MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../seed/destination_definitions.yaml | 6 + .../resources/seed/destination_specs.yaml | 61 ++++++++ airbyte-integrations/builds.md | 1 + .../destination-google-sheets/.dockerignore | 5 + .../destination-google-sheets/Dockerfile | 17 ++ .../destination-google-sheets/README.md | 123 +++++++++++++++ .../destination-google-sheets/build.gradle | 8 + .../destination_google_sheets/__init__.py | 8 + .../destination_google_sheets/buffer.py | 113 +++++++++++++ .../destination_google_sheets/client.py | 48 ++++++ .../destination_google_sheets/destination.py | 81 ++++++++++ .../destination_google_sheets/helpers.py | 78 +++++++++ .../destination_google_sheets/spec.json | 56 +++++++ .../destination_google_sheets/spreadsheet.py | 97 ++++++++++++ .../destination_google_sheets/writer.py | 91 +++++++++++ .../integration_tests/configured_catalog.json | 142 +++++++++++++++++ .../integration_tests/integration_test.py | 7 + .../sample_config_oauth.json | 8 + .../integration_tests/test_buffer.py | 137 ++++++++++++++++ .../integration_tests/test_client.py | 45 ++++++ .../integration_tests/test_data/messages.txt | 20 +++ .../test_data/test_buffer_catalog.json | 88 +++++++++++ .../test_data/test_destination_messages.txt | 1 + .../test_data/test_writer_catalog.json | 32 ++++ .../integration_tests/test_destination.py | 82 ++++++++++ .../integration_tests/test_helpers.py | 59 +++++++ .../integration_tests/test_spreadsheet.py | 87 ++++++++++ .../integration_tests/test_writer.py | 148 ++++++++++++++++++ .../destination-google-sheets/main.py | 11 ++ .../requirements.txt | 1 + .../destination-google-sheets/setup.py | 28 ++++ .../components/Sections/auth/AuthButton.tsx | 3 +- docs/SUMMARY.md | 1 + docs/integrations/README.md | 1 + .../destinations/google-sheets.md | 111 +++++++++++++ 35 files changed, 1804 insertions(+), 1 deletion(-) create mode 100644 airbyte-integrations/connectors/destination-google-sheets/.dockerignore create mode 100644 airbyte-integrations/connectors/destination-google-sheets/Dockerfile create mode 100644 airbyte-integrations/connectors/destination-google-sheets/README.md create mode 100644 airbyte-integrations/connectors/destination-google-sheets/build.gradle create mode 100644 airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/__init__.py create mode 100644 airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/buffer.py create mode 100644 airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/client.py create mode 100644 airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/destination.py create mode 100644 airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/helpers.py create mode 100644 airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/spec.json create mode 100644 airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/spreadsheet.py create mode 100644 airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/writer.py create mode 100644 airbyte-integrations/connectors/destination-google-sheets/integration_tests/configured_catalog.json create mode 100644 airbyte-integrations/connectors/destination-google-sheets/integration_tests/integration_test.py create mode 100644 airbyte-integrations/connectors/destination-google-sheets/integration_tests/sample_config_oauth.json create mode 100644 
airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_buffer.py create mode 100644 airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_client.py create mode 100644 airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_data/messages.txt create mode 100644 airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_data/test_buffer_catalog.json create mode 100644 airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_data/test_destination_messages.txt create mode 100644 airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_data/test_writer_catalog.json create mode 100644 airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_destination.py create mode 100644 airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_helpers.py create mode 100644 airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_spreadsheet.py create mode 100644 airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_writer.py create mode 100644 airbyte-integrations/connectors/destination-google-sheets/main.py create mode 100644 airbyte-integrations/connectors/destination-google-sheets/requirements.txt create mode 100644 airbyte-integrations/connectors/destination-google-sheets/setup.py create mode 100644 docs/integrations/destinations/google-sheets.md diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index 50d7bdb143b62..b2c73015c1fff 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -287,4 +287,10 @@ dockerImageTag: 0.1.1 documentationUrl: https://docs.airbyte.io/integrations/destinations/scylla icon: scylla.svg +- name: Google Sheets + destinationDefinitionId: a4cbd2d1-8dbe-4818-b8bc-b90ad782d12a + dockerRepository: airbyte/destination-google-sheets + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/destinations/google-sheets + icon: google-sheets.svg releaseStage: alpha diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index 1940432fba84e..d2396ed7deedd 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -4779,3 +4779,64 @@ supported_destination_sync_modes: - "overwrite" - "append" +- dockerImage: "airbyte/destination-google-sheets:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/destinations/google-sheets" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Destination Google Sheets" + type: "object" + required: + - "spreadsheet_id" + - "credentials" + additionalProperties: false + properties: + spreadsheet_id: + type: "string" + title: "Spreadsheet Link" + description: "The link to your spreadsheet. See this\ + \ guide for more details." 
+ examples: + - "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit" + credentials: + type: "object" + title: "* Authentication via Google (OAuth)" + description: "Google API Credentials for connecting to Google Sheets and\ + \ Google Drive APIs" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Sheets developer application." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Google Sheets developer application." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The token for obtaining new access token." + airbyte_secret: true + supportsIncremental: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: + - "overwrite" + - "append" + - "append_dedup" + authSpecification: + auth_type: "oauth2.0" + oauth2Specification: + rootObject: + - "credentials" + oauthFlowInitParameters: + - - "client_id" + - - "client_secret" + oauthFlowOutputParameters: + - - "refresh_token" diff --git a/airbyte-integrations/builds.md b/airbyte-integrations/builds.md index d39bf471be584..64ab8590280a5 100644 --- a/airbyte-integrations/builds.md +++ b/airbyte-integrations/builds.md @@ -128,6 +128,7 @@ | Google Cloud Storage (GCS) | [![destination-gcs](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-gcs%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-gcs) | | Google Firestore | [![destination-firestore](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-firestore%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-firestore) | | Google PubSub | [![destination-pubsub](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-pubsub%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-pubsub) | +| Google Sheets | [![destination-sheets](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-sheets%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-sheets) | | Kafka | [![destination-kafka](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-kafka%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-kafka) | | Keen (Chargify) | [![destination-keen](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-keen%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-keen) | | Local CSV | [![destination-csv](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-csv%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-csv) | diff --git a/airbyte-integrations/connectors/destination-google-sheets/.dockerignore b/airbyte-integrations/connectors/destination-google-sheets/.dockerignore new file mode 100644 index 0000000000000..f281200c81649 --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/.dockerignore @@ -0,0 +1,5 @@ +* +!Dockerfile +!main.py +!destination_google_sheets +!setup.py diff --git 
a/airbyte-integrations/connectors/destination-google-sheets/Dockerfile b/airbyte-integrations/connectors/destination-google-sheets/Dockerfile new file mode 100644 index 0000000000000..16799eb9f561c --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/Dockerfile @@ -0,0 +1,17 @@ +FROM python:3.9-slim + +# Bash is installed for more convenient debugging. +RUN apt-get update && apt-get install -y bash && rm -rf /var/lib/apt/lists/* + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" + +WORKDIR /airbyte/integration_code +COPY destination_google_sheets ./destination_google_sheets +COPY setup.py ./ +COPY main.py ./ +RUN pip install . + +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/destination-google-sheets diff --git a/airbyte-integrations/connectors/destination-google-sheets/README.md b/airbyte-integrations/connectors/destination-google-sheets/README.md new file mode 100644 index 0000000000000..4f5752367415c --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/README.md @@ -0,0 +1,123 @@ +# Google Sheets Destination + +This is the repository for the Google Sheets destination connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/google-sheets). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +From the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:destination-google-sheets:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/google-sheets) +to generate the necessary credentials. Then create a file `secrets/config_oauth.json` conforming to the `destination_google_sheets/spec.json` file. +Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config_oauth.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination google-sheets test creds` +and place them into `secrets/config.json`. 
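+
+A minimal `secrets/config_oauth.json` sketch is shown below; all values are placeholders, and the exact set of fields is defined in `destination_google_sheets/spec.json`:
+```json
+{
+  "spreadsheet_id": "https://docs.google.com/spreadsheets/d/<your-spreadsheet-id>/edit",
+  "credentials": {
+    "client_id": "<oauth-client-id>",
+    "client_secret": "<oauth-client-secret>",
+    "refresh_token": "<oauth-refresh-token>"
+  }
+}
+```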
+ +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config_oauth.json +python main.py discover --config secrets/config_oauth.json +cat integration_tests/test_data/messages.txt | python main.py write --config secrets/config_oauth.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/destination-google-sheets:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:destination-google-sheets:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/destination-google-sheets:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-google-sheets:dev check --config /secrets/config_oauth.json +# messages.txt is a file containing line-separated JSON representing AirbyteMessages +cat integration_tests/test_data/messages.txt | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-google-sheets:dev write --config /secrets/config_oauth.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all destination connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Coming soon: + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:destination-google-sheets:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:destination-google-sheets:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. 
Pat yourself on the back for being an awesome contributor.
+1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
diff --git a/airbyte-integrations/connectors/destination-google-sheets/build.gradle b/airbyte-integrations/connectors/destination-google-sheets/build.gradle
new file mode 100644
index 0000000000000..e4e62c47f2e22
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-google-sheets/build.gradle
@@ -0,0 +1,8 @@
+plugins {
+    id 'airbyte-python'
+    id 'airbyte-docker'
+}
+
+airbytePython {
+    moduleDirectory 'destination_google_sheets'
+}
diff --git a/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/__init__.py b/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/__init__.py
new file mode 100644
index 0000000000000..6f8d8a15ce80a
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/__init__.py
@@ -0,0 +1,8 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+
+from .destination import DestinationGoogleSheets
+
+__all__ = ["DestinationGoogleSheets"]
diff --git a/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/buffer.py b/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/buffer.py
new file mode 100644
index 0000000000000..8f1f2f4d8b505
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/buffer.py
@@ -0,0 +1,113 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+
+from typing import Any, Mapping
+
+from airbyte_cdk import AirbyteLogger
+from airbyte_cdk.models import AirbyteStream
+
+
+class WriteBufferMixin:
+
+    # Default instance of AirbyteLogger
+    logger = AirbyteLogger()
+    # interval after which the records_buffer is flushed for the selected stream
+    flush_interval = 1000
+
+    def __init__(self):
+        # Buffer for input records
+        self.records_buffer = {}
+        # Placeholder for streams metadata
+        self.stream_info = {}
+
+    @property
+    def default_missing(self) -> str:
+        """
+        Default value for keys missing from a record, compared to the configured_stream catalog.
+        Override if needed.
+        """
+        return ""
+
+    def init_buffer_stream(self, configured_stream: AirbyteStream):
+        """
+        Saves important stream information for later use.
+
+        In particular, creates the data structure for `records_buffer`
+        and populates the `stream_info` placeholder with stream metadata.
+        """
+        stream = configured_stream.stream
+        self.records_buffer[stream.name] = []
+        self.stream_info[stream.name] = {
+            "headers": sorted(list(stream.json_schema.get("properties").keys())),
+            "is_set": False,
+        }
+
+    def add_to_buffer(self, stream_name: str, record: Mapping):
+        """
+        Appends input records to `records_buffer`:
+
+        1) normalizes the input record
+        2) coerces the normalized record values to str
+        3) collects the values as a list from the record mapping.
+        """
+
+        norm_record = self._normalize_record(stream_name, record)
+        norm_values = list(map(str, norm_record.values()))
+        self.records_buffer[stream_name].append(norm_values)
+
+    def clear_buffer(self, stream_name: str):
+        """
+        Cleans up the `records_buffer` values belonging to the input stream.
+        """
+        self.records_buffer[stream_name].clear()
+
+    def _normalize_record(self, stream_name: str, record: Mapping) -> Mapping[str, Any]:
+        """
+        Updates the record keys to match the input configured_stream catalog keys.
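+        Missing keys are filled with `default_missing`; keys not declared in the catalog are dropped.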
+
+        Handles two scenarios:
+        1) when the record has fewer keys than the catalog declares (undersetting)
+        2) when the record has more keys than the catalog declares (oversetting)
+
+        Returns: alphabetically sorted, catalog-normalized Mapping[str, Any].
+
+        EXAMPLE:
+        - UnderSetting:
+            * Catalog:
+                - has 3 entities:
+                    [ 'id', 'key1', 'key2' ]
+                            ^
+            * Input record:
+                - is missing 1 entity, compared to the catalog:
+                    { 'id': 123, 'key2': 'value' }
+                            ^
+            * Result:
+                - 'key1' has been added to the record, because it was declared in the catalog, to keep the data structure:
+                    { 'id': 123, 'key1': '', 'key2': 'value' }
+                            ^
+        - OverSetting:
+            * Catalog:
+                - has 3 entities:
+                    [ 'id', 'key1', 'key2' ]
+                            ^
+            * Input record:
+                - doesn't have the entity 'key1'
+                - has 1 extra entity, compared to the catalog: 'key3'
+                    { 'id': 123, 'key2': 'value', 'key3': 'value' }
+                            ^                     ^
+            * Result:
+                - 'key1' was added, because it was expected to be part of the record, to keep the data structure
+                - 'key3' was dropped, because it was not declared in the catalog, to keep the data structure
+                    { 'id': 123, 'key1': '', 'key2': 'value' }
+                            ^          ^
+
+        """
+        headers = self.stream_info[stream_name]["headers"]
+        # undersetting scenario
+        [record.update({key: self.default_missing}) for key in headers if key not in record.keys()]
+        # oversetting scenario
+        [record.pop(key) for key in record.copy().keys() if key not in headers]
+
+        return dict(sorted(record.items(), key=lambda x: x[0]))
diff --git a/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/client.py b/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/client.py
new file mode 100644
index 0000000000000..4f3913fc75071
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/client.py
@@ -0,0 +1,48 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+
+from typing import Dict
+
+import pygsheets
+from airbyte_cdk import AirbyteLogger
+from google.auth.transport.requests import Request
+from google.oauth2 import credentials as client_account
+from pygsheets.client import Client as pygsheets_client
+
+# the list of required scopes/permissions
+# more info: https://developers.google.com/sheets/api/guides/authorizing#OAuth2Authorizing
+SCOPES = [
+    "https://www.googleapis.com/auth/spreadsheets",
+    "https://www.googleapis.com/auth/drive.file",
+]
+
+
+class GoogleSheetsClient:
+
+    logger = AirbyteLogger()
+
+    def __init__(self, config: Dict):
+        self.config = config
+        self.retries = 100  # max number of backoff retries
+
+    def authorize(self) -> pygsheets_client:
+        input_creds = self.config.get("credentials")
+        auth_creds = client_account.Credentials.from_authorized_user_info(info=input_creds)
+        client = pygsheets.authorize(custom_credentials=auth_creds)
+
+        # increase the max number of retries in case the Rate Limit is reached
+        client.drive.retries = self.retries  # for google drive api
+        client.sheet.retries = self.retries  # for google sheets api
+
+        # check if the token is expired and refresh it
+        if client.oauth.expired:
+            self.logger.info("Auth session is expired. Refreshing...")
+            client.oauth.refresh(Request())
+            if not client.oauth.expired:
+                self.logger.info("Successfully refreshed auth session")
+            else:
+                self.logger.fatal("The token is expired and could not be refreshed, please check that the credentials are still valid!")
+
+        return client
diff --git a/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/destination.py b/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/destination.py
new file mode 100644
index 0000000000000..89b76428ddd25
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/destination.py
@@ -0,0 +1,81 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+from typing import Any, Iterable, Mapping
+
+from airbyte_cdk import AirbyteLogger
+from airbyte_cdk.destinations import Destination
+from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, DestinationSyncMode, Status, Type
+from google.auth.exceptions import RefreshError
+
+from .client import GoogleSheetsClient
+from .helpers import ConnectionTest, get_spreadsheet_id, get_streams_from_catalog
+from .spreadsheet import GoogleSheets
+from .writer import GoogleSheetsWriter
+
+
+class DestinationGoogleSheets(Destination):
+    def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus:
+        """
+        Connection check method for Google Spreadsheets.
+        Info:
+            Checks whether the target spreadsheet_id is available using the provided credentials.
+        Returns:
+            :: Status.SUCCEEDED - if the credentials are valid, the token is refreshed, and the target spreadsheet is available.
+            :: Status.FAILED - if a new token could not be obtained, the target spreadsheet is not available, or another exception occurred (with message).
+        """
+        spreadsheet_id = get_spreadsheet_id(config["spreadsheet_id"])
+        try:
+            client = GoogleSheetsClient(config).authorize()
+            spreadsheet = GoogleSheets(client, spreadsheet_id)
+            check_result = ConnectionTest(spreadsheet).perform_connection_test()
+            if check_result:
+                return AirbyteConnectionStatus(status=Status.SUCCEEDED)
+        except RefreshError as token_err:
+            return AirbyteConnectionStatus(status=Status.FAILED, message=f"{token_err}")
+        except Exception as err:
+            return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(err)}")
+
+    def write(
+        self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage]
+    ) -> Iterable[AirbyteMessage]:
+
+        """
+        Reads the input stream of messages, config, and catalog to write data to the destination.
+ """ + spreadsheet_id = get_spreadsheet_id(config["spreadsheet_id"]) + + client = GoogleSheetsClient(config).authorize() + spreadsheet = GoogleSheets(client, spreadsheet_id) + writer = GoogleSheetsWriter(spreadsheet) + + # get streams from catalog up to the limit + configured_streams = get_streams_from_catalog(configured_catalog) + # getting stream names explicitly + configured_stream_names = [stream.stream.name for stream in configured_streams] + + for configured_stream in configured_streams: + writer.init_buffer_stream(configured_stream) + if configured_stream.destination_sync_mode == DestinationSyncMode.overwrite: + writer.delete_stream_entries(configured_stream.stream.name) + + for message in input_messages: + if message.type == Type.RECORD: + record = message.record + # process messages for available streams only + if record.stream in configured_stream_names: + writer.add_to_buffer(record.stream, record.data) + writer.queue_write_operation(record.stream) + elif message.type == Type.STATE: + yield message + else: + continue + + # if there are any records left in buffer + writer.write_whats_left() + + # deduplicating records for `append_dedup` sync-mode + for configured_stream in configured_streams: + if configured_stream.destination_sync_mode == DestinationSyncMode.append_dedup: + writer.deduplicate_records(configured_stream) diff --git a/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/helpers.py b/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/helpers.py new file mode 100644 index 0000000000000..efe5efe5067d4 --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/helpers.py @@ -0,0 +1,78 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +import re +from typing import List + +from airbyte_cdk import AirbyteLogger +from airbyte_cdk.models import ConfiguredAirbyteCatalog +from pygsheets import Spreadsheet, Worksheet +from pygsheets.exceptions import WorksheetNotFound + +STREAMS_COUNT_LIMIT = 200 + + +logger = AirbyteLogger() + + +def get_spreadsheet_id(id_or_url: str) -> str: + if re.match(r"(http://)|(https://)", id_or_url): + m = re.search(r"(/)([-\w]{40,})([/]?)", id_or_url) + if m.group(2): + return m.group(2) + else: + logger.error( + "The provided URL doesn't match the requirements. See this guide for more details." + ) + else: + return id_or_url + + +def get_streams_from_catalog(catalog: ConfiguredAirbyteCatalog, limit: int = STREAMS_COUNT_LIMIT): + streams_count = len(catalog.streams) + if streams_count > limit: + logger.warn(f"Only {limit} of {streams_count} will be processed due to Google Sheets (worksheet count < {limit}) limitations.") + return catalog.streams[:limit] + return catalog.streams + + +class ConnectionTest: + + """ + Performs connection test write operation to ensure the target spreadsheet is available for writing. + Initiating the class itself, performs the connection test and stores the result in ConnectionTest.result property. 
+ """ + + def __init__(self, spreadsheet: Spreadsheet): + self.spreadsheet = spreadsheet + self.wks_name: str = "_airbyte_conn_test" + self.test_data: List[str] = ["conn_test", "success"] + + def add_test_wks(self) -> Worksheet: + self.spreadsheet.spreadsheet.add_worksheet(self.wks_name, rows=2, cols=1) + return self.spreadsheet.open_worksheet(self.wks_name) + + def remove_test_wks(self): + wks = self.spreadsheet.open_worksheet(self.wks_name) + self.spreadsheet.spreadsheet.del_worksheet(wks) + + def populate_test_wks(self, wks: Worksheet) -> Worksheet: + wks.append_table(self.test_data, dimension="COLUMNS") + return wks + + def check_values(self, wks: Worksheet) -> bool: + value = wks.get_value("A2") + return True if value == self.test_data[1] else False + + def perform_connection_test(self) -> bool: + try: + if self.spreadsheet.spreadsheet.worksheets("title", self.wks_name): + self.remove_test_wks() + result: bool = self.check_values(self.populate_test_wks(self.add_test_wks())) + except WorksheetNotFound: + result: bool = self.check_values(self.populate_test_wks(self.add_test_wks())) + + self.remove_test_wks() + return result diff --git a/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/spec.json b/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/spec.json new file mode 100644 index 0000000000000..cdfa07c129cb5 --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/spec.json @@ -0,0 +1,56 @@ +{ + "documentationUrl": "https://docs.airbyte.io/integrations/destinations/google-sheets", + "supported_destination_sync_modes": ["overwrite", "append", "append_dedup"], + "supportsIncremental": true, + "supportsDBT": false, + "supportsNormalization": false, + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Destination Google Sheets", + "type": "object", + "required": ["spreadsheet_id", "credentials"], + "additionalProperties": false, + "properties": { + "spreadsheet_id": { + "type": "string", + "title": "Spreadsheet Link", + "description": "The link to your spreadsheet. 
See this guide for more details.",
+        "examples": ["https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit"]
+      },
+      "credentials": {
+        "type": "object",
+        "title": "* Authentication via Google (OAuth)",
+        "description": "Google API Credentials for connecting to the Google Sheets and Google Drive APIs",
+        "required": ["client_id", "client_secret", "refresh_token"],
+        "properties": {
+          "client_id": {
+            "title": "Client ID",
+            "type": "string",
+            "description": "The Client ID of your Google Sheets developer application.",
+            "airbyte_secret": true
+          },
+          "client_secret": {
+            "title": "Client Secret",
+            "type": "string",
+            "description": "The Client Secret of your Google Sheets developer application.",
+            "airbyte_secret": true
+          },
+          "refresh_token": {
+            "title": "Refresh Token",
+            "type": "string",
+            "description": "The token used to obtain a new access token.",
+            "airbyte_secret": true
+          }
+        }
+      }
+    }
+  },
+  "authSpecification": {
+    "auth_type": "oauth2.0",
+    "oauth2Specification": {
+      "rootObject": ["credentials"],
+      "oauthFlowInitParameters": [["client_id"], ["client_secret"]],
+      "oauthFlowOutputParameters": [["refresh_token"]]
+    }
+  }
+}
diff --git a/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/spreadsheet.py b/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/spreadsheet.py
new file mode 100644
index 0000000000000..7cba109fdd96b
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/spreadsheet.py
@@ -0,0 +1,97 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+from typing import List, Mapping
+
+from pygsheets import Spreadsheet, Worksheet
+from pygsheets.client import Client as pygsheets_client
+from pygsheets.exceptions import WorksheetNotFound
+
+
+class GoogleSheets:
+    def __init__(self, client: pygsheets_client, spreadsheet_id: str):
+        self.client = client
+        self.spreadsheet_id = spreadsheet_id
+
+    @property
+    def spreadsheet(self) -> Spreadsheet:
+        """
+        Returns a pygsheets.Spreadsheet with the target spreadsheet opened by key.
+        """
+        return self.client.open_by_key(self.spreadsheet_id)
+
+    def open_worksheet(self, stream_name: str) -> Worksheet:
+        """
+        Opens the connection to the target worksheet, if it exists. Otherwise, creates one.
+        """
+        try:
+            stream = self.spreadsheet.worksheet_by_title(stream_name)
+        except WorksheetNotFound:
+            stream = self.spreadsheet.add_worksheet(stream_name)
+        return stream
+
+    def clean_worksheet(self, stream_name: str):
+        """
+        Cleans up the existing records inside the worksheet, or creates the worksheet if it doesn't exist.
+        """
+        try:
+            stream = self.open_worksheet(stream_name)
+            stream.clear()
+        except WorksheetNotFound:
+            self.spreadsheet.add_worksheet(stream_name)
+
+    def set_headers(self, stream_name: str, headers_list: List[str]):
+        """
+        Sets the headers belonging to the input stream.
+        """
+        stream: Worksheet = self.open_worksheet(stream_name)
+        stream.update_row(1, headers_list)
+
+    def index_cols(self, stream: Worksheet) -> Mapping[str, int]:
+        """
+        Finds the index of every column that exists in the worksheet.
+        Returns: Mapping of column name to its index, e.g.
+            {"id": 1, "name": 2, ..., "other": 99}
+        """
+        header = stream[1]  # get the first row
+        col_index = {}
+        for i, col in enumerate(header):
+            col_index[col] = i + 1
+        return col_index
+
+    def find_duplicates(self, stream: Worksheet, primary_key: str):
+        """
+        Finds the duplicated records inside the target worksheet.
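+        Rows are compared by the value in the `primary_key` column; every occurrence after the first is treated as a duplicate.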
+        Returns: List of indexes of rows to remove from the target worksheet, e.g.
+            [1, 4, 5, ..., 99]
+        """
+        rows_unique_values, rows_to_delete = {}, []
+        pk_col_index = self.index_cols(stream)[primary_key]
+
+        # get all column values except the first one, which holds the header
+        pk_col_values = stream.get_col(pk_col_index, include_tailing_empty=False)[1:]
+
+        for i, row_value in enumerate(pk_col_values, 2):
+            if row_value not in rows_unique_values:
+                rows_unique_values[row_value] = None
+            else:
+                rows_to_delete.append(i)
+
+        # reverse the order of the list
+        rows_to_delete.reverse()
+
+        return rows_to_delete
+
+    def remove_duplicates(self, stream: Worksheet, rows_list: list):
+        """
+        Removes duplicated rows, provided by `rows_list` as a list of indexes.
+
+        The delete operation runs in offline mode to decrease the number of API calls:
+        1) Unlink the spreadsheet (take it offline)
+        2) Perform the delete operations and update the actual row indexes
+        3) Link the spreadsheet (sync with the online version) using the batch_update method.
+        """
+        stream.unlink()
+        [stream.delete_rows(row, 1) for row in rows_list]
+        stream.link()
diff --git a/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/writer.py b/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/writer.py
new file mode 100644
index 0000000000000..f7db744bbbb00
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/writer.py
@@ -0,0 +1,91 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+
+from airbyte_cdk.models import AirbyteStream
+from pygsheets import Worksheet
+
+from .buffer import WriteBufferMixin
+from .spreadsheet import GoogleSheets
+
+
+class GoogleSheetsWriter(WriteBufferMixin):
+    def __init__(self, spreadsheet: GoogleSheets):
+        self.spreadsheet = spreadsheet
+        super().__init__()
+
+    def delete_stream_entries(self, stream_name: str):
+        """
+        Deletes all the records belonging to the input stream.
+        """
+        self.spreadsheet.clean_worksheet(stream_name)
+
+    def check_headers(self, stream_name: str):
+        """
+        Checks whether the data headers belonging to the input stream are set.
+        """
+        stream = self.stream_info[stream_name]
+        if not stream["is_set"]:
+            self.spreadsheet.set_headers(stream_name, stream["headers"])
+            self.stream_info[stream_name]["is_set"] = True
+
+    def queue_write_operation(self, stream_name: str):
+        """
+        Mimics a `batch_write` operation using the records_buffer:
+
+        1) gets data from the records_buffer
+        2) writes it to the target worksheet
+        3) cleans up the records_buffer belonging to the input stream
+        """
+
+        if len(self.records_buffer[stream_name]) == self.flush_interval:
+            self.write_from_queue(stream_name)
+            self.clear_buffer(stream_name)
+
+    def write_from_queue(self, stream_name: str):
+        """
+        Writes data from the records_buffer belonging to the input stream:
+
+        1) checks that the headers are set
+        2) gets the values from the records_buffer
+        3) if there are records to write - writes them to the target worksheet
+        """
+
+        self.check_headers(stream_name)
+        values: list = self.records_buffer[stream_name] or []
+        if values:
+            stream: Worksheet = self.spreadsheet.open_worksheet(stream_name)
+            self.logger.info(f"Writing data for stream: {stream_name}")
+            # we start from cell `A2` as the starting range to fill the spreadsheet
+            stream.append_table(values, start="A2", dimension="ROWS")
+        else:
+            self.logger.info(f"Skipping empty stream: {stream_name}")
+
+    def write_whats_left(self):
+        """
+        Writes the records that are still left in the buffer
+        but didn't meet the flush condition in `queue_write_operation`.
+        """
+        for stream_name in self.records_buffer:
+            self.write_from_queue(stream_name)
+            self.clear_buffer(stream_name)
+
+    def deduplicate_records(self, configured_stream: AirbyteStream):
+        """
+        Finds and removes duplicated records for the target stream, using the `primary_key`.
+        The worksheet is processed offline and synced afterwards to reduce the API call rate.
+        If rate limits are hit while deduplicating, they are handled automatically; the operation continues after a backoff.
+        """
+        primary_key: str = configured_stream.primary_key[0][0]
+        stream_name: str = configured_stream.stream.name
+
+        stream: Worksheet = self.spreadsheet.open_worksheet(stream_name)
+        rows_to_remove: list = self.spreadsheet.find_duplicates(stream, primary_key)
+
+        if rows_to_remove:
+            self.logger.info(f"Duplicated records are found for stream: {stream_name}, resolving...")
+            self.spreadsheet.remove_duplicates(stream, rows_to_remove)
+            self.logger.info(f"Finished deduplicating records for stream: {stream_name}")
+        else:
+            self.logger.info(f"No duplicated records found for stream: {stream_name}")
diff --git a/airbyte-integrations/connectors/destination-google-sheets/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/configured_catalog.json
new file mode 100644
index 0000000000000..4687514bb47f6
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/configured_catalog.json
@@ -0,0 +1,142 @@
+{
+  "streams": [
+    {
+      "stream": {
+        "name": "stream_1",
+        "json_schema": {
+          "$schema": "http://json-schema.org/draft-07/schema#",
+          "type": "object",
+          "properties": {
+            "id": {
+              "type": ["null", "string"]
+            },
+            "str": {
+              "type": ["null", "string"]
+            },
+            "num": {
+              "type": ["null", "number"]
+            },
+            "list": {
+              "type": ["null", "array"],
+              "items": {
+                "type": ["null", "string"]
+              }
+            },
+            "dict": {
+              "type": ["null", "object"],
+              "properties": {
+                "type": ["null", "string"]
+              }
+            },
+            "double_list": {
+              "type": ["null", "array"],
+              "items": {
+                "type": ["null", "array"],
+                "items": {
+                  "type": ["null", "string"]
+                }
+              }
+            }
+          }
+        },
+        "supported_sync_modes": ["full_refresh"],
+        "supported_destination_sync_modes": ["overwrite", "append", "append_dedup"]
+      },
+      "sync_mode": "full_refresh",
+      "destination_sync_mode": "append_dedup",
+      "primary_key": [ [ "id" ] ]
+    },
+    {
+      "stream": {
+        "name": "stream_2",
+        "json_schema": {
+          "$schema": "http://json-schema.org/draft-07/schema#",
+          "type": "object",
+          "properties": {
+            "id": {
+              "type": ["null", "string"]
+            },
+            "str": {
+              "type": ["null", "string"]
+            },
+            "num": {
+              "type": ["null", "number"]
+            },
+            "list": {
+              "type": ["null", "array"],
+              "items": {
+                "type": ["null", "string"]
+              }
+            },
+            "dict": {
+              "type": ["null",
"object"], + "properties": { + "type": ["null", "string"] + } + }, + "double_list": { + "type": ["null", "array"], + "items": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + } + } + } + }, + "supported_sync_modes": ["full_refresh"], + "supported_destination_sync_modes": ["overwrite", "append", "append_dedup"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append", + "primary_key": [ [ "id" ] ] + }, + { + "stream": { + "name": "stream_3", + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "str": { + "type": ["null", "string"] + }, + "num": { + "type": ["null", "number"] + }, + "list": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + "dict": { + "type": ["null", "object"], + "properties": { + "type": ["null", "string"] + } + }, + "double_list": { + "type": ["null", "array"], + "items": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + } + } + } + }, + "supported_sync_modes": ["full_refresh"], + "supported_destination_sync_modes": ["overwrite", "append"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite", + "primary_key": [ [ "id" ] ] + } + ] +} diff --git a/airbyte-integrations/connectors/destination-google-sheets/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/integration_test.py new file mode 100644 index 0000000000000..d06b970d93649 --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/integration_test.py @@ -0,0 +1,7 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + +# fixture for the _customIntegrationTest test. +def test_fixture(): + assert True diff --git a/airbyte-integrations/connectors/destination-google-sheets/integration_tests/sample_config_oauth.json b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/sample_config_oauth.json new file mode 100644 index 0000000000000..055843049889e --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/sample_config_oauth.json @@ -0,0 +1,8 @@ +{ + "spreadsheet_id": "https://docs.google.com/spreadsheets/d//edit#gid=0", + "credentials": { + "client_id": "YOUR_OAUTH_CLIENT_ID", + "client_secret": "YOUR_OAUTH_CLIENT_SECRET", + "refresh_token": "YOUR_REFRESH_TOKEN" + } +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_buffer.py b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_buffer.py new file mode 100644 index 0000000000000..689bbf4c6c3a6 --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_buffer.py @@ -0,0 +1,137 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + + +import io +from typing import Iterable + +import pytest +from airbyte_cdk import AirbyteLogger +from airbyte_cdk.models import AirbyteMessage, ConfiguredAirbyteCatalog, Type +from destination_google_sheets.buffer import WriteBufferMixin + +# ----- PREPARE ENV ----- + +# path to configured_catalog json file +TEST_CATALOG_PATH: str = "integration_tests/test_data/test_buffer_catalog.json" +# path to test records txt file +TEST_RECORDS_PATH: str = "integration_tests/test_data/messages.txt" +# reading prepared catalog with streams +TEST_CATALOG: ConfiguredAirbyteCatalog = ConfiguredAirbyteCatalog.parse_file(TEST_CATALOG_PATH) +# instance of WriteBufferMixin +TEST_WRITE_BUFFER: WriteBufferMixin = WriteBufferMixin() + + +# reading input messages from file +def read_input_messages(records_path: str) -> Iterable[AirbyteMessage]: + with open(records_path, "rb") as f: + input_stream = io.TextIOWrapper(f, encoding="utf-8") + for line in input_stream: + yield AirbyteMessage.parse_raw(line) + + +# ----- BEGIN TESTS ----- + + +def test_logger(): + test_logger = TEST_WRITE_BUFFER.logger + assert isinstance(test_logger, AirbyteLogger) + + +@pytest.mark.parametrize( + "buffer, stream_name", + [ + (TEST_WRITE_BUFFER.records_buffer, "stream_1"), + (TEST_WRITE_BUFFER.records_buffer, "stream_2"), + (TEST_WRITE_BUFFER.records_buffer, "stream_3"), + ((TEST_WRITE_BUFFER.stream_info), "stream_1"), + (TEST_WRITE_BUFFER.stream_info, "stream_2"), + (TEST_WRITE_BUFFER.stream_info, "stream_2"), + ], + ids=[ + "records_buf_stream_1", + "records_buf_stream_2", + "records_buf_stream_3", + "stream_info_stream_1", + "stream_info_stream_2", + "stream_info_stream_3", + ], +) +def test_init_buffer_stream(buffer, stream_name): + for configured_stream in TEST_CATALOG.streams: + TEST_WRITE_BUFFER.init_buffer_stream(configured_stream) + + for stream in buffer: + if stream_name in stream: + assert stream_name in stream + + +def test_add_to_buffer(input_messages=read_input_messages(TEST_RECORDS_PATH)): + for message in input_messages: + if message.type == Type.RECORD: + record = message.record + TEST_WRITE_BUFFER.add_to_buffer(record.stream, record.data) + else: + continue + + for stream in TEST_WRITE_BUFFER.records_buffer: + assert len(TEST_WRITE_BUFFER.records_buffer[stream]) > 0 + + +@pytest.mark.parametrize( + "stream_name, expected_count", + [ + ("stream_1", 7), + ("stream_2", 6), + ("stream_3", 6), + ], + ids=["stream_1", "stream_2", "stream_3"], +) +def test_records_count_in_buffer(stream_name, expected_count): + assert len(TEST_WRITE_BUFFER.records_buffer[stream_name]) == expected_count + + +@pytest.mark.parametrize( + "stream_name", + [ + ("stream_1"), + ("stream_2"), + ("stream_3"), + ], + ids=["stream_1", "stream_2", "stream_3"], +) +def test_clear_buffer(stream_name): + TEST_WRITE_BUFFER.clear_buffer(stream_name) + # check the buffer is cleaned + assert len(TEST_WRITE_BUFFER.records_buffer[stream_name]) == 0 + + +@pytest.mark.parametrize( + "stream_name, record, expected", + [ + ("stream_1", {"id": 123}, {"id": 123, "key1": "", "list": ""}), + ("stream_2", {"id": 123, "key2": "value"}, {"id": 123, "key1": "", "list": ""}), + ("stream_3", {}, {"id": "", "key1": "", "list": ""}), + ], + ids=["Undersetting", "Oversetting", "empty_record"], +) +def test_normalize_record(stream_name, record, expected): + actual = TEST_WRITE_BUFFER._normalize_record(stream_name, record) + assert actual == expected + + +@pytest.mark.parametrize( + "buffer, expected_len", + [ + (TEST_WRITE_BUFFER.records_buffer, 0), + 
(TEST_WRITE_BUFFER.stream_info, 0), + ], + ids=["records_buffer", "stream_info"], +) +def test_check_buffers_are_null(buffer, expected_len): + buffer.clear() + assert len(buffer) == expected_len + + +# ----- END TESTS ----- diff --git a/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_client.py b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_client.py new file mode 100644 index 0000000000000..253013f160291 --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_client.py @@ -0,0 +1,45 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +import pytest +from destination_google_sheets.client import GoogleSheetsClient +from integration_tests.test_helpers import TEST_CONFIG +from pygsheets.client import Client as pygsheets_client + +# ----- PREPARE ENV ----- + +# path to configured_catalog json file +TEST_CATALOG_PATH: str = "integration_tests/test_data/test_catalog.json" +# client instance +TEST_CLIENT: pygsheets_client = GoogleSheetsClient(TEST_CONFIG) +# authorized client +AUTHORIZED_CLIENT: pygsheets_client = TEST_CLIENT.authorize() + + +# ----- BEGIN TESTS ----- + + +def test_client(): + client = TEST_CLIENT.authorize() + assert isinstance(client, pygsheets_client) + # if the authentication is successful we will have `token_uri` and `expiry` properties inside. + for i in ["token_uri", "expiry"]: + assert i in client.oauth.to_json() + + +@pytest.mark.parametrize( + "property, expected_retries", + [ + (TEST_CLIENT.retries, 100), + (AUTHORIZED_CLIENT.drive.retries, 100), + (AUTHORIZED_CLIENT.sheet.retries, 100), + ], + ids=["client_main_retries", "client_drive_retries", "client_sheet_retries"], +) +def test_max_retries_are_set(property, expected_retries): + assert property == expected_retries + + +# ----- END TESTS ----- diff --git a/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_data/messages.txt b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_data/messages.txt new file mode 100644 index 0000000000000..419a961d79b31 --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_data/messages.txt @@ -0,0 +1,20 @@ +{"type": "RECORD", "record": {"stream": "stream_1", "emitted_at": 1602637589000, "data": { "id" : "1"}}} +{"type": "RECORD", "record": {"stream": "stream_1", "emitted_at": 1602637589000, "data": { "id" : "2", "str": "test2"}}} +{"type": "RECORD", "record": {"stream": "stream_1", "emitted_at": 1602637589000, "data": { "id" : "3", "str": "test3", "num": 3 }}} +{"type": "RECORD", "record": {"stream": "stream_1", "emitted_at": 1602637589000, "data": { "id" : "4", "str": "test4", "num": 4, "list": ["test4"] }}} +{"type": "RECORD", "record": {"stream": "stream_1", "emitted_at": 1602637589000, "data": { "id" : "5", "str": "test5", "num": 5, "list": ["test5"], "dict": {"key": "value5"} }}} +{"type": "RECORD", "record": {"stream": "stream_1", "emitted_at": 1602637589000, "data": { "id" : "6", "str": "test6", "num": 6, "list": ["test6"], "dict": {"key": "value6"}, "double_list": [["test_double_list6"]] }}} +{"type": "RECORD", "record": {"stream": "stream_1", "emitted_at": 1602637589000, "data": { "id" : "7", "str": "test7", "num": 7, "list": ["test7"], "dict": {"key": "value7"}, "double_list": [["test_double_list7"]], "other": "other" }}} +{"type": "RECORD", "record": {"stream": "stream_2", "emitted_at": 1602637589000, "data": { "id" : "1"}}} +{"type": 
"RECORD", "record": {"stream": "stream_2", "emitted_at": 1602637589000, "data": { "id" : "2", "str": "test2" }}} +{"type": "RECORD", "record": {"stream": "stream_2", "emitted_at": 1602637589000, "data": { "id" : "3", "str": "test3", "num": 333 }}} +{"type": "RECORD", "record": {"stream": "stream_2", "emitted_at": 1602637589000, "data": { "id" : "4", "str": "test4", "num": 444, "list": ["test4"]}}} +{"type": "RECORD", "record": {"stream": "stream_2", "emitted_at": 1602637589000, "data": { "id" : "5", "str": "test5", "num": 555, "list": ["test5"], "dict": {"key": "value5"} }}} +{"type": "RECORD", "record": {"stream": "stream_2", "emitted_at": 1602637589000, "data": { "id" : "6", "str": "test6", "num": 666, "list": ["test6"], "dict": {"key": "value6"}, "double_list": [["test_double_list6"]] }}} +{"type": "RECORD", "record": {"stream": "stream_3", "emitted_at": 1602637589000, "data": { "id" : "1"}}} +{"type": "RECORD", "record": {"stream": "stream_3", "emitted_at": 1602637589000, "data": { "id" : "2", "str": "test2" }}} +{"type": "RECORD", "record": {"stream": "stream_3", "emitted_at": 1602637589000, "data": { "id" : "3", "str": "test3", "num": 333 }}} +{"type": "RECORD", "record": {"stream": "stream_3", "emitted_at": 1602637589000, "data": { "id" : "4", "str": "test4", "num": 444, "list": ["test4"]}}} +{"type": "RECORD", "record": {"stream": "stream_3", "emitted_at": 1602637589000, "data": { "id" : "5", "str": "test5", "num": 555, "list": ["test5"], "dict": {"key": "value5"} }}} +{"type": "RECORD", "record": {"stream": "stream_3", "emitted_at": 1602637589000, "data": { "id" : "6", "str": "test6", "num": 666, "list": ["test6"], "dict": {"key": "value6"}, "double_list": [["test_double_list6"]] }}} +{"type": "STATE", "state": { "data": {"start_date": "2022-04-15"}}} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_data/test_buffer_catalog.json b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_data/test_buffer_catalog.json new file mode 100644 index 0000000000000..9477cc66ee959 --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_data/test_buffer_catalog.json @@ -0,0 +1,88 @@ +{ + "streams": [ + { + "stream": { + "name": "stream_1", + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "key1": { + "type": ["null", "numeric"] + }, + "list": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + } + } + }, + "supported_sync_modes": ["full_refresh"], + "supported_destination_sync_modes": ["overwrite", "append", "append_dedup"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append_dedup", + "primary_key": [ [ "id" ] ] + }, + { + "stream": { + "name": "stream_2", + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "key1": { + "type": ["null", "numeric"] + }, + "list": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + } + } + }, + "supported_sync_modes": ["full_refresh"], + "supported_destination_sync_modes": ["overwrite", "append", "append_dedup"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append", + "primary_key": [ [ "id" ] ] + }, + { + "stream": { + "name": "stream_3", + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + 
"type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "key1": { + "type": ["null", "numeric"] + }, + "list": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + } + } + }, + "supported_sync_modes": ["full_refresh"], + "supported_destination_sync_modes": ["overwrite", "append"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite", + "primary_key": [ [ "id" ] ] + } + ] +} diff --git a/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_data/test_destination_messages.txt b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_data/test_destination_messages.txt new file mode 100644 index 0000000000000..24854f2826fce --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_data/test_destination_messages.txt @@ -0,0 +1 @@ +{"type": "RECORD", "record": {"stream": "stream_1", "emitted_at": 1602637589000, "data": {"id" : "1"}}} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_data/test_writer_catalog.json b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_data/test_writer_catalog.json new file mode 100644 index 0000000000000..c0ca2263cf6f6 --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_data/test_writer_catalog.json @@ -0,0 +1,32 @@ +{ + "streams": [ + { + "stream": { + "name": "stream_1", + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "key1": { + "type": ["null", "numeric"] + }, + "list": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + } + } + }, + "supported_sync_modes": ["full_refresh"], + "supported_destination_sync_modes": ["overwrite", "append", "append_dedup"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append_dedup", + "primary_key": [ [ "id" ] ] + } + ] +} diff --git a/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_destination.py b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_destination.py new file mode 100644 index 0000000000000..49ef1ca288832 --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_destination.py @@ -0,0 +1,82 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +import sys +from io import StringIO + +import pytest +from airbyte_cdk import AirbyteLogger +from airbyte_cdk.models import AirbyteConnectionStatus, Status +from destination_google_sheets.destination import DestinationGoogleSheets +from integration_tests.test_buffer import read_input_messages +from integration_tests.test_helpers import TEST_CONFIG +from integration_tests.test_writer import TEST_CATALOG, TEST_SPREADSHEET, TEST_STREAM + +# ----- PREPARE ENV ----- + + +class CaptureStdOut(list): + """ + Captures the stdout messages into the variable list, that could be validated later. 
+ """ + + def __enter__(self): + self._stdout = sys.stdout + sys.stdout = self._stringio = StringIO() + return self + + def __exit__(self, *args): + self.extend(self._stringio.getvalue().splitlines()) + del self._stringio + sys.stdout = self._stdout + + +# define instance +TEST_DESTINATION: DestinationGoogleSheets = DestinationGoogleSheets() +# path to test records txt file +TEST_RECORDS_PATH: str = "integration_tests/test_data/test_destination_messages.txt" + +# ----- BEGIN TESTS ----- + + +def test_check(): + expected = AirbyteConnectionStatus(status=Status.SUCCEEDED) + actual = TEST_DESTINATION.check(logger=AirbyteLogger, config=TEST_CONFIG) + assert actual == expected + + +@pytest.mark.parametrize( + "expected, raised", + [ + ('{"type": "LOG", "log": {"level": "INFO", "message": "Auth session is expired. Refreshing..."}}', False), + ('{"type": "LOG", "log": {"level": "INFO", "message": "Successfully refreshed auth session"}}', False), + ('{"type": "LOG", "log": {"level": "INFO", "message": "Writing data for stream: stream_1"}}', True), + ('{"type": "LOG", "log": {"level": "INFO", "message": "No duplicated records found for stream: stream_1"}}', True), + ], + ids=[ + "token needs refresh", + "token refreshed", + "writing stream", + "no dups found for stream", + ], +) +def test_write(expected, raised): + + # clean worksheet after previous test + TEST_SPREADSHEET.clean_worksheet(TEST_STREAM) + + # perform test + with CaptureStdOut() as output: + list( + TEST_DESTINATION.write( + config=TEST_CONFIG, configured_catalog=TEST_CATALOG, input_messages=read_input_messages(TEST_RECORDS_PATH) + ) + ) + + assert True if not raised else any(msg == expected for msg in output) + + # clean wks after the test + test_wks = TEST_SPREADSHEET.open_worksheet(TEST_STREAM) + TEST_SPREADSHEET.spreadsheet.del_worksheet(test_wks) diff --git a/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_helpers.py b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_helpers.py new file mode 100644 index 0000000000000..eedf355a4c888 --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_helpers.py @@ -0,0 +1,59 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+#
+
+
+import json
+from typing import Any, Mapping
+
+from airbyte_cdk.models import ConfiguredAirbyteCatalog
+from destination_google_sheets.client import GoogleSheetsClient
+from destination_google_sheets.helpers import ConnectionTest, get_spreadsheet_id, get_streams_from_catalog
+from destination_google_sheets.spreadsheet import GoogleSheets
+from pygsheets.client import Client as pygsheets_client
+
+# ----- PREPARE ENV -----
+
+
+def get_config(config_path: str = "secrets/config_oauth.json") -> Mapping[str, Any]:
+    """
+    Gets the test config from `secrets/config_oauth.json`, or from the provided path.
+    """
+    with open(config_path, "r") as f:
+        return json.loads(f.read())
+
+
+# using real config from secrets/config_oauth.json
+TEST_CONFIG: dict = get_config()
+# client instance
+TEST_CLIENT: pygsheets_client = GoogleSheetsClient(TEST_CONFIG).authorize()
+# get test spreadsheet_id
+TEST_SPREADSHEET_ID: str = get_spreadsheet_id(TEST_CONFIG.get("spreadsheet_id"))
+# define test GoogleSheets instance
+TEST_SPREADSHEET: GoogleSheets = GoogleSheets(TEST_CLIENT, TEST_SPREADSHEET_ID)
+# define test stream
+TEST_STREAM: str = "test_stream"
+# path to configured_catalog json file
+TEST_CATALOG_PATH: str = "integration_tests/configured_catalog.json"
+# reading prepared catalog with streams
+TEST_CATALOG: ConfiguredAirbyteCatalog = ConfiguredAirbyteCatalog.parse_file(TEST_CATALOG_PATH)
+
+
+# ----- BEGIN TESTS -----
+
+
+def test_connection_test_write():
+    check_result = ConnectionTest(TEST_SPREADSHEET).perform_connection_test()
+    assert check_result is True
+
+
+def test_get_spreadsheet_id(config: dict = TEST_CONFIG):
+    expected = "1Zi1addRSXvXNf3-fxMhEGlshsgTl6tg76fvzaGjuy50"
+    spreadsheet_id = get_spreadsheet_id(config.get("spreadsheet_id"))
+    assert spreadsheet_id == expected
+
+
+def test_get_streams_from_catalog():
+    limit_count_streams = 2
+    actual = get_streams_from_catalog(TEST_CATALOG, limit_count_streams)
+    assert len(actual) == limit_count_streams
diff --git a/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_spreadsheet.py b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_spreadsheet.py
new file mode 100644
index 0000000000000..3db55b15c137f
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_spreadsheet.py
@@ -0,0 +1,87 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+# + + +from destination_google_sheets.client import GoogleSheetsClient +from destination_google_sheets.helpers import get_spreadsheet_id +from destination_google_sheets.spreadsheet import GoogleSheets +from integration_tests.test_helpers import TEST_CONFIG +from pygsheets.client import Client as pygsheets_client + +# ----- PREPARE ENV ----- + + +# client instance +TEST_CLIENT: pygsheets_client = GoogleSheetsClient(TEST_CONFIG).authorize() +# get test spreadsheet_id +TEST_SPREADSHEET_ID: str = get_spreadsheet_id(TEST_CONFIG.get("spreadsheet_id")) +# define test Spreadsheet class +TEST_SPREADSHEET: GoogleSheets = GoogleSheets(TEST_CLIENT, TEST_SPREADSHEET_ID) +# define test stream +TEST_STREAM: str = "test_stream" + + +# ----- BEGIN TESTS ----- + + +def test_spreadsheet(): + assert len(TEST_SPREADSHEET.spreadsheet.worksheets()) > 0 + + +def test_open_worksheet(): + test_wks = TEST_SPREADSHEET.open_worksheet(TEST_STREAM) + assert test_wks.id is not None + + +def test_clean_worksheet(): + TEST_SPREADSHEET.clean_worksheet(TEST_STREAM) + test_wks = TEST_SPREADSHEET.open_worksheet(TEST_STREAM) + records = test_wks.get_all_records() + assert len(records) == 0 + + +def test_set_headers(): + test_headers = ["id", "key"] + TEST_SPREADSHEET.set_headers("test_stream", ["id", "key"]) + test_wks = TEST_SPREADSHEET.open_worksheet(TEST_STREAM) + headers = test_wks[1] + for header in test_headers: + if header in headers: + assert True + + +def test_index_cols(): + expected = {"id": 1, "key": 2, "": 26} + test_wks = TEST_SPREADSHEET.open_worksheet(TEST_STREAM) + col_indexed = TEST_SPREADSHEET.index_cols(test_wks) + assert col_indexed == expected + + +def test_find_duplicates(): + input_values = [[1, "a"], [1, "a"], [2, "b"], [1, "a"], [1, "a"]] + expected = [6, 5, 3] # the 4th row is the duplicate of the 1st. + + test_wks = TEST_SPREADSHEET.open_worksheet(TEST_STREAM) + test_wks.append_table(input_values, start="A2", dimension="ROWS") + test = TEST_SPREADSHEET.find_duplicates(test_wks, "id") + assert test == expected + + +def test_remove_duplicates(): + expected = [{"id": 1, "key": "a"}, {"id": 2, "key": "b"}] + test_wks = TEST_SPREADSHEET.open_worksheet(TEST_STREAM) + # find the duplicated rows + rows_to_remove = TEST_SPREADSHEET.find_duplicates(test_wks, "id") + # remove duplicates + TEST_SPREADSHEET.remove_duplicates(test_wks, rows_to_remove) + records = test_wks.get_all_records() + assert records == expected + + +def test_delete_test_stream(): + test_wks = TEST_SPREADSHEET.open_worksheet(TEST_STREAM) + TEST_SPREADSHEET.spreadsheet.del_worksheet(test_wks) + + +# ----- END TESTS ----- diff --git a/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_writer.py b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_writer.py new file mode 100644 index 0000000000000..1bd6090ce3c5c --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/integration_tests/test_writer.py @@ -0,0 +1,148 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+#
+
+import pytest
+from airbyte_cdk.models import ConfiguredAirbyteCatalog
+from destination_google_sheets.writer import GoogleSheetsWriter
+from integration_tests.test_spreadsheet import TEST_SPREADSHEET
+
+# ----- PREPARE ENV -----
+
+
+# path to configured_catalog json file
+TEST_CATALOG_PATH: str = "integration_tests/test_data/test_writer_catalog.json"
+# reading prepared catalog with streams
+TEST_CATALOG: ConfiguredAirbyteCatalog = ConfiguredAirbyteCatalog.parse_file(TEST_CATALOG_PATH)
+# define test writer
+TEST_WRITER: GoogleSheetsWriter = GoogleSheetsWriter(TEST_SPREADSHEET)
+# set flush buffer interval
+TEST_WRITER.flush_interval = 2
+# test stream name
+TEST_STREAM: str = "stream_1"
+
+
+# ----- BEGIN TESTS -----
+
+
+def _prepare_buffers():
+    for configured_stream in TEST_CATALOG.streams:
+        TEST_WRITER.init_buffer_stream(configured_stream)
+
+
+def test_delete_stream_entries():
+    _prepare_buffers()
+    TEST_WRITER.delete_stream_entries(TEST_STREAM)
+    test_wks = TEST_SPREADSHEET.open_worksheet(TEST_STREAM)
+    records = test_wks.get_all_records()
+    assert len(records) == 0
+
+
+def test_check_headers():
+    TEST_WRITER.check_headers(TEST_STREAM)
+    assert TEST_WRITER.stream_info[TEST_STREAM]["is_set"]
+
+
+# define input records:
+# 3 records are defined, but only 2 should be written here because flush_interval == 2;
+# the third stays in the buffer and is written by test_write_whats_left below.
+input_records = [
+    {
+        "stream": TEST_STREAM,
+        "data": {"id": 1},
+    },
+    {
+        "stream": TEST_STREAM,
+        "data": {"id": 2, "key1": "test"},
+    },
+    {
+        "stream": TEST_STREAM,
+        "data": {"id": 3, "key1": "test", "list": ["str_in_list"]},
+    },
+]
+
+
+@pytest.mark.parametrize(
+    "expected",
+    [
+        ([{"id": 1, "key1": "", "list": ""}, {"id": 2, "key1": "test", "list": ""}]),
+    ],
+    ids=["2/3 records"],
+)
+def test_queue_write_operation(expected):
+    for record in input_records:
+        stream_name = record["stream"]
+        data = record["data"]
+        TEST_WRITER.add_to_buffer(stream_name, data)
+        TEST_WRITER.queue_write_operation(stream_name)
+
+    # check expected records are written into target worksheet
+    test_wks = TEST_SPREADSHEET.open_worksheet(stream_name)
+    records = test_wks.get_all_records()
+    assert records == expected
+
+
+@pytest.mark.parametrize(
+    "expected",
+    [
+        ([{"id": 1, "key1": "", "list": ""}, {"id": 2, "key1": "test", "list": ""}, {"id": 3, "key1": "test", "list": "['str_in_list']"}]),
+    ],
+    ids=["3/3 records"],
+)
+def test_write_whats_left(expected):
+    TEST_WRITER.write_whats_left()
+
+    # check expected records are written into target worksheet
+    test_wks = TEST_SPREADSHEET.open_worksheet(TEST_STREAM)
+    records = test_wks.get_all_records()
+    assert records == expected
+
+    # clean worksheet for future tests
+    test_wks.clear()
+
+
+input_dup_records = [
+    {
+        "stream": TEST_STREAM,
+        "data": {"id": 1, "key1": "test"},
+    },
+    {
+        "stream": TEST_STREAM,
+        "data": {"id": 1, "key1": "test"},
+    },
+]
+
+
+@pytest.mark.parametrize(
+    "expected",
+    [
+        ([{"id": 1, "key1": "test", "list": ""}]),
+    ],
+    ids=["dedup_records"],
+)
+def test_deduplicate_records(expected):
+    # reset the `is_set` headers flag, since the headers were already set by the tests above
+    TEST_WRITER.stream_info[TEST_STREAM]["is_set"] = False
+
+    # writing duplicates
+    for record in input_dup_records:
+        stream_name = record["stream"]
+        data = record["data"]
+        TEST_WRITER.add_to_buffer(stream_name, data)
+        TEST_WRITER.queue_write_operation(stream_name)
+
+    # removing duplicates
+    for configured_stream in TEST_CATALOG.streams:
+        
TEST_WRITER.deduplicate_records(configured_stream) + + # checking result + test_wks = TEST_SPREADSHEET.open_worksheet(TEST_STREAM) + records = test_wks.get_all_records() + assert records == expected + + # remove the test worksheet after tests + TEST_SPREADSHEET.spreadsheet.del_worksheet(test_wks) + + +# ----- END TESTS ----- diff --git a/airbyte-integrations/connectors/destination-google-sheets/main.py b/airbyte-integrations/connectors/destination-google-sheets/main.py new file mode 100644 index 0000000000000..29202589faa49 --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/main.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +import sys + +from destination_google_sheets import DestinationGoogleSheets + +if __name__ == "__main__": + DestinationGoogleSheets().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-google-sheets/requirements.txt b/airbyte-integrations/connectors/destination-google-sheets/requirements.txt new file mode 100644 index 0000000000000..d6e1198b1ab1f --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/requirements.txt @@ -0,0 +1 @@ +-e . diff --git a/airbyte-integrations/connectors/destination-google-sheets/setup.py b/airbyte-integrations/connectors/destination-google-sheets/setup.py new file mode 100644 index 0000000000000..fb61d56b08fd2 --- /dev/null +++ b/airbyte-integrations/connectors/destination-google-sheets/setup.py @@ -0,0 +1,28 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk", + "pygsheets==2.0.5", + "google-auth-oauthlib==0.5.1", + "google-api-python-client==2.47.0", +] + +TEST_REQUIREMENTS = ["pytest~=6.1", "requests-mock"] + +setup( + name="destination_google_sheets", + description="Destination implementation for Google Sheets.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/auth/AuthButton.tsx b/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/auth/AuthButton.tsx index 676a1c1258f2d..e8ee5a8b29eee 100644 --- a/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/auth/AuthButton.tsx +++ b/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/auth/AuthButton.tsx @@ -30,7 +30,8 @@ function isGoogleConnector(connectorDefinitionId: string): boolean { "eff3616a-f9c3-11eb-9a03-0242ac130003", // google analytics "d19ae824-e289-4b14-995a-0632eb46d246", // google directory "eb4c9e00-db83-4d63-a386-39cfa91012a8", // google search console - "71607ba1-c0ac-4799-8049-7f4b90dd50f7", // google sheets + "71607ba1-c0ac-4799-8049-7f4b90dd50f7", // google sheets source + "a4cbd2d1-8dbe-4818-b8bc-b90ad782d12a", // google sheets destination "ed9dfefa-1bbc-419d-8c5e-4d78f0ef6734", // google workspace admin reports ].includes(connectorDefinitionId); } diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index 6800470c95f72..c872ea86f16c8 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -195,6 +195,7 @@ - [Google Cloud Storage (GCS)](integrations/destinations/gcs.md) - [Google Firestore](integrations/destinations/firestore.md) - [Google PubSub](integrations/destinations/pubsub.md) + - [Google Sheets](integrations/destinations/google-sheets.md) - 
[Kafka](integrations/destinations/kafka.md)
  - [Keen](integrations/destinations/keen.md)
  - [Local CSV](integrations/destinations/local-csv.md)
diff --git a/docs/integrations/README.md b/docs/integrations/README.md
index a92a127100841..fb0a47a8f3387 100644
--- a/docs/integrations/README.md
+++ b/docs/integrations/README.md
@@ -182,6 +182,7 @@ For more information about the grading system, see [Product Release Stages](http
 | [End-to-End Testing](destinations/e2e-test.md) | Alpha | Yes |
 | [Google Cloud Storage (GCS)](destinations/gcs.md) | Beta | Yes |
 | [Google Pubsub](destinations/pubsub.md) | Alpha | Yes |
+| [Google Sheets](destinations/google-sheets.md) | Alpha | Yes |
 | [Kafka](destinations/kafka.md) | Alpha | No |
 | [Keen](destinations/keen.md) | Alpha | No |
 | [Kinesis](destinations/kinesis.md) | Alpha | No |
diff --git a/docs/integrations/destinations/google-sheets.md b/docs/integrations/destinations/google-sheets.md
new file mode 100644
index 0000000000000..4ef3dd6901fe0
--- /dev/null
+++ b/docs/integrations/destinations/google-sheets.md
@@ -0,0 +1,111 @@
+# Google Sheets
+
+## Sync overview
+
+The Google Sheets Destination is configured to push data to a single Google Sheets spreadsheet, with multiple worksheets as streams. To replicate data to multiple spreadsheets, you can create multiple instances of the Google Sheets Destination in your Airbyte instance.
+Please review the [Google Sheets limitations](#google-sheets-limitations) before configuring your Airbyte data replication to the Google Sheets destination.
+
+### Output schema
+
+Each source-connector stream is written to a separate worksheet in the selected spreadsheet. The data is coerced to string before being written to the spreadsheet. Nested data is normalized to the first level of nesting and represented as strings, so nested lists and objects arrive as strings rather than native lists and objects; further processing is required if you need to analyze such data.
+
+Airbyte only supports replicating `Grid` sheets, which means only raw text data can be replicated to the target spreadsheet. See the [Google Sheets API docs](https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/sheets#SheetType) for more info on all available sheet types.
+
+#### Note:
+* The output columns are ordered alphabetically and should not be reordered manually after a sync; doing so could corrupt the data for all subsequent syncs.
+* Record normalization is applied during the write process to avoid data corruption. This handles two scenarios, illustrated below and in the code sketch before the changelog:
+1. UnderSetting - when a record has fewer keys (columns) than the catalog declares
+2. OverSetting - when a record has more keys (columns) than the catalog declares
+```
+EXAMPLE:
+
+- UnderSetting:
+    * Catalog:
+        - has 3 entities:
+            [ 'id', 'key1', 'key2' ]
+                    ^
+    * Input record:
+        - missing 1 entity, compared to the catalog
+            { 'id': 123, 'key2': 'value' }
+                         ^
+    * Result:
+        - 'key1' has been added to the record, because it was declared in the catalog, to keep the data structure.
+            { 'id': 123, 'key1': '', 'key2': 'value' }
+                         ^
+- OverSetting:
+    * Catalog:
+        - has 3 entities:
+            [ 'id', 'key1', 'key2' ]
+                            ^
+    * Input record:
+        - doesn't have entity 'key1'
+        - has 1 more entity, compared to the catalog: 'key3'
+            { 'id': 123, 'key2': 'value', 'key3': 'value' }
+                                          ^
+    * Result:
+        - 'key1' was added, because it is expected to be part of the record, to keep the data structure
+        - 'key3' was dropped, because it was not declared in the catalog, to keep the data structure
+            { 'id': 123, 'key1': '', 'key2': 'value' }
+                         ^
+```
+
+### Data type mapping
+
+| Integration Type | Airbyte Type |
+| :--- | :--- |
+| Any Type | `string` |
+
+### Features
+
+| Feature | Supported?\(Yes/No\) |
+| :--- | :--- |
+| Full Refresh Overwrite | Yes |
+| Full Refresh Append | Yes |
+| Incremental Append | Yes |
+| Incremental Append-Deduplicate | Yes |
+
+### Performance considerations
+
+At the time of writing, the [Google API rate limit](https://developers.google.com/sheets/api/limits) is 100 requests per 100 seconds per user and 500 requests per 100 seconds per project. Airbyte batches requests to the API in order to write data efficiently and respects these rate limits. It is recommended that you use the same account for no more than 3 instances of the Google Sheets Destination to ensure high transfer speeds.
+
+### Google Sheets Limitations
+
+During the upload process, and from a data storage perspective, there are some limitations that should be considered beforehand:
+* **Maximum of 5 Million Cells**
+
+A Google Sheets document can have a maximum of 5 million cells, in a single worksheet or spread across multiple sheets. Once the limit is reached with a certain number of columns, no more columns can be added (and likewise, once the limit is reached with a certain number of rows, no more rows can be added).
+
+* **Maximum of 18,278 Columns**
+
+A single worksheet can have at most 18,278 columns.
+
+* **Up to 200 Worksheets in a Spreadsheet**
+
+You cannot create more than 200 worksheets within a single spreadsheet.
+
+
+## Getting Started (Airbyte Cloud Only)
+To configure the connector you'll need to:
+
+* [Authorize your Google account via OAuth](#authorize-your-google-account-via-oauth)
+* Provide the [full URL or spreadsheet ID](#spreadsheet-link) of the spreadsheet you'd like to sync
+
+### Authorize your Google account via OAuth
+Click on the "Sign in with Google" button and authorize access via your Google account.
+
+### Spreadsheet Link
+You will need the link of the spreadsheet you'd like to sync. To get it, click the **Share** button in the top-right corner of the Google Sheets interface, then click **Copy link** in the dialog that pops up.
+These two steps are highlighted in the screenshot below:
+
+![](../../.gitbook/assets/google_spreadsheet_url.png)
+
+
+#### Future improvements:
+- Handle multiple spreadsheets, splitting large amounts of data into parts once the main spreadsheet is full and cannot be extended further due to the [limitations](#google-sheets-limitations) above.
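+
+#### Record normalization example
+
+As a minimal sketch of the normalization described in the [Output schema](#output-schema) note above, the Python snippet below null-fills missing catalog columns and drops undeclared ones. It is for illustration only; `normalize_record` is a hypothetical helper name, not the connector's actual implementation.
+
+```python
+def normalize_record(record: dict, catalog_columns: list) -> dict:
+    # Null-fill any catalog column missing from the record (under-setting)
+    # and keep only the declared columns, dropping extras (over-setting).
+    return {column: record.get(column, "") for column in catalog_columns}
+
+
+catalog_columns = ["id", "key1", "key2"]
+# UnderSetting: 'key1' is restored as an empty value
+assert normalize_record({"id": 123, "key2": "value"}, catalog_columns) == {"id": 123, "key1": "", "key2": "value"}
+# OverSetting: the undeclared 'key3' is dropped
+assert normalize_record({"id": 123, "key2": "value", "key3": "value"}, catalog_columns) == {"id": 123, "key1": "", "key2": "value"}
+```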
+ +## Changelog + +| Version | Date | Pull Request | Subject | +|---------|------------|------------------------------------------------------------|----------------------------------------| +| 0.1.0 | 2022-04-26 | [12135](https://github.com/airbytehq/airbyte/pull/12135) | Initial Release | From 5ff96ab946004a46b3779bd598a417c066a9711f Mon Sep 17 00:00:00 2001 From: Evan Tahler Date: Wed, 11 May 2022 16:22:16 -0700 Subject: [PATCH 33/55] Use `host.docker.internal` for acceptance testing on macs (#12791) * Use `host.docker.internal` for acceptance testing * Lint * conditional hostname for mac only --- .../java/io/airbyte/test/acceptance/AcceptanceTests.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AcceptanceTests.java b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AcceptanceTests.java index fbee095ff3abb..030a83b8dd474 100644 --- a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AcceptanceTests.java +++ b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AcceptanceTests.java @@ -174,6 +174,7 @@ public class AcceptanceTests { private static final boolean IS_KUBE = System.getenv().containsKey("KUBE"); private static final boolean IS_MINIKUBE = System.getenv().containsKey("IS_MINIKUBE"); private static final boolean IS_GKE = System.getenv().containsKey("IS_GKE"); + private static final boolean IS_MAC = System.getProperty("os.name").startsWith("Mac"); private static final boolean USE_EXTERNAL_DEPLOYMENT = System.getenv("USE_EXTERNAL_DEPLOYMENT") != null && System.getenv("USE_EXTERNAL_DEPLOYMENT").equalsIgnoreCase("true"); @@ -1548,6 +1549,8 @@ private Map localConfig(final PostgreSQLContainer psql, final bo // used on a single node with docker driver dbConfig.put("host", "host.docker.internal"); } + } else if (IS_MAC) { + dbConfig.put("host", "host.docker.internal"); } else { dbConfig.put("host", "localhost"); } From 4b5bd2b5a471a659fe0173df44d92fffcee845f3 Mon Sep 17 00:00:00 2001 From: VitaliiMaltsev <39538064+VitaliiMaltsev@users.noreply.github.com> Date: Thu, 12 May 2022 12:30:54 +0300 Subject: [PATCH 34/55] Redshift Destination Apply buffering strategy (#12601) * Redshift Destination: Apply buffering strategy * add manifest uploading * refactoring * fixed checkstyle * updated CHANGELOG * removed redundant static * airbyte-12265: Added stagingOperations.onDestinationCloseOperations() to StagingConsumerFactory.java. * airbyte-12265: Created operations and copiers java packages. 
* safe delete of RedshiftCopyS3Destination.java * rename tests * bump version * auto-bump connector version Co-authored-by: Oleksandr Tsukanov Co-authored-by: Octavia Squidington III --- .../seed/destination_definitions.yaml | 2 +- .../resources/seed/destination_specs.yaml | 54 +++--- .../staging/StagingConsumerFactory.java | 3 +- .../destination-redshift/Dockerfile | 2 +- .../redshift/RedshiftCopyS3Destination.java | 88 ---------- .../redshift/RedshiftDestination.java | 4 +- .../redshift/RedshiftInsertDestination.java | 3 +- .../RedshiftStagingS3Destination.java | 109 ++++++++++++ .../{ => copiers}/RedshiftStreamCopier.java | 2 +- .../RedshiftStreamCopierFactory.java | 2 +- .../RedshiftS3StagingSqlOperations.java | 158 ++++++++++++++++++ .../RedshiftSqlOperations.java | 4 +- ...dshiftInsertDestinationAcceptanceTest.java | 2 +- ...tagingInsertDestinationAcceptanceTest.java | 2 +- ...ftStagingS3DestinationAcceptanceTest.java} | 7 +- .../RedshiftStreamCopierTest.java | 2 +- .../destination/s3/S3StorageOperations.java | 8 +- docs/integrations/destinations/redshift.md | 1 + 18 files changed, 323 insertions(+), 130 deletions(-) delete mode 100644 airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftCopyS3Destination.java create mode 100644 airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3Destination.java rename airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/{ => copiers}/RedshiftStreamCopier.java (99%) rename airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/{ => copiers}/RedshiftStreamCopierFactory.java (95%) create mode 100644 airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftS3StagingSqlOperations.java rename airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/{ => operations}/RedshiftSqlOperations.java (98%) rename airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/{RedshiftCopyDestinationAcceptanceTest.java => RedshiftStagingS3DestinationAcceptanceTest.java} (94%) rename airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/{ => copiers}/RedshiftStreamCopierTest.java (99%) diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index b2c73015c1fff..8d80c6d16c037 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -218,7 +218,7 @@ - name: Redshift destinationDefinitionId: f7a7d195-377f-cf5b-70a5-be6b819019dc dockerRepository: airbyte/destination-redshift - dockerImageTag: 0.3.32 + dockerImageTag: 0.3.33 documentationUrl: https://docs.airbyte.io/integrations/destinations/redshift icon: redshift.svg resourceRequirements: diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index d2396ed7deedd..3db14f30d966d 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ 
b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -3461,7 +3461,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-redshift:0.3.32" +- dockerImage: "airbyte/destination-redshift:0.3.33" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/redshift" connectionSpecification: @@ -3514,27 +3514,30 @@ default: "public" title: "Default Schema" s3_bucket_name: - title: "S3 Bucket Name" + title: "S3 Bucket Name (Optional)" type: "string" description: "The name of the staging S3 bucket to use if utilising a COPY\ \ strategy. COPY is recommended for production workloads for better speed\ - \ and scalability. See AWS docs for more details." examples: - "airbyte.staging" s3_bucket_path: - title: "S3 Bucket Path" + title: "S3 Bucket Path (Optional)" type: "string" description: "The directory under the S3 bucket where data will be written.\ - \ If not provided, then defaults to the root directory." + \ If not provided, then defaults to the root directory. See path's name recommendations for more details." examples: - "data_sync/test" s3_bucket_region: - title: "S3 Bucket Region" + title: "S3 Bucket Region (Optional)" type: "string" default: "" description: "The region of the S3 staging bucket to use if utilising a\ - \ copy strategy." + \ COPY strategy. See AWS docs for details." enum: - "" - "us-east-1" @@ -3562,15 +3565,18 @@ - "me-south-1" access_key_id: type: "string" - description: "The Access Key Id granting allow one to access the above S3\ - \ staging bucket. Airbyte requires Read and Write permissions to the given\ - \ bucket." - title: "S3 Key Id" + description: "This ID grants access to the above S3 staging bucket. Airbyte\ + \ requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key." + title: "S3 Key Id (Optional)" airbyte_secret: true secret_access_key: type: "string" - description: "The corresponding secret to the above access key id." - title: "S3 Access Key" + description: "The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key." + title: "S3 Access Key (Optional)" airbyte_secret: true part_size: type: "integer" @@ -3578,20 +3584,22 @@ maximum: 100 examples: - "10" - description: "Optional. Increase this if syncing tables larger than 100GB.\ - \ Only relevant for COPY. Files are streamed to S3 in parts. This determines\ - \ the size of each part, in MBs. As S3 has a limit of 10,000 parts per\ - \ file, part size affects the table size. This is 10MB by default, resulting\ - \ in a default limit of 100GB tables. Note, a larger part size will result\ + description: "Increase this if syncing tables larger than 100GB. Only relevant\ + \ for COPY. Files are streamed to S3 in parts. This determines the size\ + \ of each part, in MBs. As S3 has a limit of 10,000 parts per file, part\ + \ size affects the table size. This is 10MB by default, resulting in a\ + \ default limit of 100GB tables. Note: a larger part size will result\ \ in larger memory requirements. A rule of thumb is to multiply the part\ - \ size by 10 to get the memory requirement. Modify this with care." - title: "Stream Part Size" + \ size by 10 to get the memory requirement. Modify this with care. See\ + \ docs for details." 
+ title: "Stream Part Size (Optional)" purge_staging_data: - title: "Purge Staging Files and Tables" + title: "Purge Staging Files and Tables (Optional)" type: "boolean" description: "Whether to delete the staging files from S3 after completing\ - \ the sync. See the docs for details. Only relevant for COPY. Defaults\ - \ to true." + \ the sync. See docs for details." default: true supportsIncremental: true supportsNormalization: true diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingConsumerFactory.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingConsumerFactory.java index 1da2beb67669e..469b53f205f30 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingConsumerFactory.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingConsumerFactory.java @@ -204,7 +204,6 @@ private OnCloseFunction onCloseFunction(final JdbcDatabase database, throw new RuntimeException("Failed to upload data from stage " + stagingPath, e); } writeConfig.clearStagedFiles(); - stagingOperations.createTableIfNotExists(database, schemaName, dstTableName); switch (writeConfig.getSyncMode()) { case OVERWRITE -> queryList.add(stagingOperations.truncateTableQuery(database, schemaName, dstTableName)); @@ -213,7 +212,7 @@ private OnCloseFunction onCloseFunction(final JdbcDatabase database, } queryList.add(stagingOperations.copyTableQuery(database, schemaName, srcTableName, dstTableName)); } - + stagingOperations.onDestinationCloseOperations(database, writeConfigs.stream().map(WriteConfig::getOutputSchemaName).collect(Collectors.toSet())); LOGGER.info("Executing finalization of tables."); stagingOperations.executeTransaction(database, queryList); LOGGER.info("Finalizing tables in destination completed."); diff --git a/airbyte-integrations/connectors/destination-redshift/Dockerfile b/airbyte-integrations/connectors/destination-redshift/Dockerfile index 20f394fc6f3b1..127b3bd3b5b01 100644 --- a/airbyte-integrations/connectors/destination-redshift/Dockerfile +++ b/airbyte-integrations/connectors/destination-redshift/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-redshift COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.3.32 +LABEL io.airbyte.version=0.3.33 LABEL io.airbyte.name=airbyte/destination-redshift diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftCopyS3Destination.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftCopyS3Destination.java deleted file mode 100644 index c45e9d250aa15..0000000000000 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftCopyS3Destination.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.redshift; - -import static io.airbyte.integrations.destination.redshift.RedshiftInsertDestination.getJdbcDatabase; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.db.jdbc.JdbcDatabase; -import io.airbyte.integrations.base.AirbyteMessageConsumer; -import io.airbyte.integrations.destination.ExtendedNameTransformer; -import io.airbyte.integrations.destination.jdbc.SqlOperations; -import io.airbyte.integrations.destination.jdbc.copy.CopyConsumerFactory; -import io.airbyte.integrations.destination.jdbc.copy.CopyDestination; -import io.airbyte.integrations.destination.jdbc.copy.s3.S3CopyConfig; -import io.airbyte.integrations.destination.redshift.enums.RedshiftDataTmpTableMode; -import io.airbyte.integrations.destination.s3.S3Destination; -import io.airbyte.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.integrations.destination.s3.S3StorageOperations; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import java.util.function.Consumer; - -/** - * A more efficient Redshift Destination than the sql-based {@link RedshiftDestination}. Instead of - * inserting data as batched SQL INSERTs, we follow Redshift best practices and, 1) Stream the data - * to S3, creating multiple compressed files per stream. 2) Create a manifest file to load the data - * files in parallel. See: - * https://docs.aws.amazon.com/redshift/latest/dg/c_best-practices-use-copy.html for more info. - *

- * Creating multiple files per stream currently has the naive approach of one file per batch on a
- * stream up to the max limit of (26 * 26 * 26) 17576 files. Each batch is randomly prefixed by 3
- * Alpha characters and on a collision the batch is appended to the existing file.
- */
-public class RedshiftCopyS3Destination extends CopyDestination {
-
-  private final RedshiftDataTmpTableMode redshiftDataTmpTableMode;
-
-  public RedshiftCopyS3Destination(RedshiftDataTmpTableMode redshiftDataTmpTableMode) {
-    this.redshiftDataTmpTableMode = redshiftDataTmpTableMode;
-  }
-
-  @Override
-  public AirbyteMessageConsumer getConsumer(final JsonNode config,
-                                            final ConfiguredAirbyteCatalog catalog,
-                                            final Consumer<AirbyteMessage> outputRecordCollector)
-      throws Exception {
-    return CopyConsumerFactory.create(
-        outputRecordCollector,
-        getDatabase(config),
-        getSqlOperations(),
-        getNameTransformer(),
-        S3CopyConfig.getS3CopyConfig(config),
-        catalog,
-        new RedshiftStreamCopierFactory(),
-        getConfiguredSchema(config));
-  }
-
-  @Override
-  public void checkPersistence(final JsonNode config) throws Exception {
-    final S3DestinationConfig s3Config = getS3DestinationConfig(config);
-    S3Destination.attemptS3WriteAndDelete(new S3StorageOperations(getNameTransformer(), s3Config.getS3Client(), s3Config), s3Config, "");
-  }
-
-  @Override
-  public ExtendedNameTransformer getNameTransformer() {
-    return new RedshiftSQLNameTransformer();
-  }
-
-  @Override
-  public JdbcDatabase getDatabase(final JsonNode config) {
-    return getJdbcDatabase(config);
-  }
-
-  public SqlOperations getSqlOperations() {
-    return new RedshiftSqlOperations(redshiftDataTmpTableMode);
-  }
-
-  private String getConfiguredSchema(final JsonNode config) {
-    return config.get("schema").asText();
-  }
-
-  private S3DestinationConfig getS3DestinationConfig(final JsonNode config) {
-    return S3DestinationConfig.getS3DestinationConfig(config);
-  }
-
-}
diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftDestination.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftDestination.java
index bf283eb31a67f..883a752ccf88f 100644
--- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftDestination.java
+++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftDestination.java
@@ -19,7 +19,7 @@
  * {@link RedshiftInsertDestination} for more detail. The second inserts via streaming the data to
  * an S3 bucket, and Cop-ing the date into Redshift. This is more efficient, and recommended for
  * production workloads, but does require users to set up an S3 bucket and pass in additional
- * credentials. See {@link RedshiftCopyS3Destination} for more detail. This class inspect the given
+ * credentials. See {@link RedshiftStagingS3Destination} for more detail. This class inspects the given
 * arguments to determine which strategy to use. 
*/ public class RedshiftDestination extends SwitchingDestination { @@ -42,7 +42,7 @@ public static DestinationType getTypeFromConfig(final JsonNode config) { public static Map getTypeToDestination() { return Map.of( DestinationType.INSERT_WITH_SUPER_TMP_TYPE, new RedshiftInsertDestination(RedshiftDataTmpTableMode.SUPER), - DestinationType.COPY_S3_WITH_SUPER_TMP_TYPE, new RedshiftCopyS3Destination(RedshiftDataTmpTableMode.SUPER)); + DestinationType.COPY_S3_WITH_SUPER_TMP_TYPE, new RedshiftStagingS3Destination(RedshiftDataTmpTableMode.SUPER)); } public static DestinationType determineUploadMode(final JsonNode config) { diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java index 576323918e64e..d0cca6e62c3cf 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java @@ -13,12 +13,13 @@ import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination; import io.airbyte.integrations.destination.redshift.enums.RedshiftDataTmpTableMode; +import io.airbyte.integrations.destination.redshift.operations.RedshiftSqlOperations; import java.util.Map; import java.util.Optional; public class RedshiftInsertDestination extends AbstractJdbcDestination { - private static final String DRIVER_CLASS = DatabaseDriver.REDSHIFT.getDriverClassName(); + public static final String DRIVER_CLASS = "com.amazon.redshift.jdbc.Driver"; private static final String USERNAME = "username"; private static final String PASSWORD = "password"; private static final String SCHEMA = "schema"; diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3Destination.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3Destination.java new file mode 100644 index 0000000000000..0f74974e0123f --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3Destination.java @@ -0,0 +1,109 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
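+ *
+ * Summary of the flow implemented below: records are buffered locally as
+ * gzipped CSV files (see CsvSerializedBuffer in getConsumer), staged in the
+ * configured S3 bucket, and loaded into Redshift via COPY; staged files are
+ * removed after the sync unless "purge_staging_data" is set to false.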
+ */ + +package io.airbyte.integrations.destination.redshift; + +import static io.airbyte.integrations.destination.redshift.RedshiftInsertDestination.SSL_JDBC_PARAMETERS; +import static io.airbyte.integrations.destination.redshift.RedshiftInsertDestination.getJdbcDatabase; +import static io.airbyte.integrations.destination.s3.S3DestinationConfig.getS3DestinationConfig; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.integrations.base.AirbyteMessageConsumer; +import io.airbyte.integrations.base.Destination; +import io.airbyte.integrations.base.sentry.AirbyteSentry; +import io.airbyte.integrations.destination.NamingConventionTransformer; +import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination; +import io.airbyte.integrations.destination.record_buffer.FileBuffer; +import io.airbyte.integrations.destination.redshift.enums.RedshiftDataTmpTableMode; +import io.airbyte.integrations.destination.redshift.operations.RedshiftS3StagingSqlOperations; +import io.airbyte.integrations.destination.redshift.operations.RedshiftSqlOperations; +import io.airbyte.integrations.destination.s3.S3Destination; +import io.airbyte.integrations.destination.s3.S3DestinationConfig; +import io.airbyte.integrations.destination.s3.S3StorageOperations; +import io.airbyte.integrations.destination.s3.csv.CsvSerializedBuffer; +import io.airbyte.integrations.destination.staging.StagingConsumerFactory; +import io.airbyte.protocol.models.AirbyteConnectionStatus; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import java.util.Map; +import java.util.function.Consumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class RedshiftStagingS3Destination extends AbstractJdbcDestination implements Destination { + + private static final Logger LOGGER = LoggerFactory.getLogger(RedshiftStagingS3Destination.class); + private final RedshiftDataTmpTableMode redshiftDataTmpTableMode; + + public RedshiftStagingS3Destination(RedshiftDataTmpTableMode redshiftDataTmpTableMode) { + super(RedshiftInsertDestination.DRIVER_CLASS, new RedshiftSQLNameTransformer(), new RedshiftSqlOperations(redshiftDataTmpTableMode)); + this.redshiftDataTmpTableMode = redshiftDataTmpTableMode; + } + + @Override + public AirbyteConnectionStatus check(final JsonNode config) { + final S3DestinationConfig s3Config = getS3DestinationConfig(config); + S3Destination.attemptS3WriteAndDelete(new S3StorageOperations(new RedshiftSQLNameTransformer(), s3Config.getS3Client(), s3Config), s3Config, ""); + + final NamingConventionTransformer nameTransformer = getNamingResolver(); + final RedshiftS3StagingSqlOperations redshiftS3StagingSqlOperations = + new RedshiftS3StagingSqlOperations(nameTransformer, s3Config.getS3Client(), s3Config, redshiftDataTmpTableMode); + try (final JdbcDatabase database = getDatabase(config)) { + final String outputSchema = super.getNamingResolver().getIdentifier(config.get("schema").asText()); + AirbyteSentry.executeWithTracing("CreateAndDropTable", + () -> attemptSQLCreateAndDropTableOperations(outputSchema, database, nameTransformer, redshiftS3StagingSqlOperations)); + return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED); + } catch (final Exception e) { + LOGGER.error("Exception while checking connection: ", e); + return new AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.FAILED) + 
.withMessage("Could not connect with provided configuration. \n" + e.getMessage()); + } + + } + + @Override + protected JdbcDatabase getDatabase(final JsonNode config) { + return getJdbcDatabase(config); + } + + @Override + protected NamingConventionTransformer getNamingResolver() { + return new RedshiftSQLNameTransformer(); + } + + @Override + protected Map getDefaultConnectionProperties(JsonNode config) { + return SSL_JDBC_PARAMETERS; + } + + // this is a no op since we override getDatabase. + @Override + public JsonNode toJdbcConfig(JsonNode config) { + return Jsons.emptyObject(); + } + + @Override + public AirbyteMessageConsumer getConsumer(final JsonNode config, + final ConfiguredAirbyteCatalog catalog, + final Consumer outputRecordCollector) { + final S3DestinationConfig s3Config = getS3DestinationConfig(config); + return new StagingConsumerFactory().create( + outputRecordCollector, + getDatabase(config), + new RedshiftS3StagingSqlOperations(getNamingResolver(), s3Config.getS3Client(), s3Config, redshiftDataTmpTableMode), + getNamingResolver(), + CsvSerializedBuffer.createFunction(null, () -> new FileBuffer(CsvSerializedBuffer.CSV_GZ_SUFFIX)), + config, + catalog, + isPurgeStagingData(config)); + } + + private boolean isPurgeStagingData(final JsonNode config) { + return !config.has("purge_staging_data") || config.get("purge_staging_data").asBoolean(); + } + +} diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopier.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/copiers/RedshiftStreamCopier.java similarity index 99% rename from airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopier.java rename to airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/copiers/RedshiftStreamCopier.java index bd5d7c39067d0..aee4a963bdf4a 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopier.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/copiers/RedshiftStreamCopier.java @@ -2,7 +2,7 @@ * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
*/ -package io.airbyte.integrations.destination.redshift; +package io.airbyte.integrations.destination.redshift.copiers; import com.amazonaws.services.s3.AmazonS3; import com.fasterxml.jackson.databind.ObjectMapper; diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopierFactory.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/copiers/RedshiftStreamCopierFactory.java similarity index 95% rename from airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopierFactory.java rename to airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/copiers/RedshiftStreamCopierFactory.java index 9876f03800b29..8b5b11c517981 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopierFactory.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/copiers/RedshiftStreamCopierFactory.java @@ -2,7 +2,7 @@ * Copyright (c) 2021 Airbyte, Inc., all rights reserved. */ -package io.airbyte.integrations.destination.redshift; +package io.airbyte.integrations.destination.redshift.copiers; import com.amazonaws.services.s3.AmazonS3; import io.airbyte.db.jdbc.JdbcDatabase; diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftS3StagingSqlOperations.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftS3StagingSqlOperations.java new file mode 100644 index 0000000000000..36a3269eddcd6 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftS3StagingSqlOperations.java @@ -0,0 +1,158 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
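+ *
+ * In short: the staged gzipped CSV parts are listed in a JSON manifest that is
+ * itself uploaded to S3, and a single COPY ... MANIFEST statement then loads
+ * all listed parts in one operation.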
+ */
+
+package io.airbyte.integrations.destination.redshift.operations;
+
+import com.amazonaws.services.s3.AmazonS3;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import io.airbyte.commons.lang.Exceptions;
+import io.airbyte.db.jdbc.JdbcDatabase;
+import io.airbyte.integrations.base.sentry.AirbyteSentry;
+import io.airbyte.integrations.destination.NamingConventionTransformer;
+import io.airbyte.integrations.destination.record_buffer.SerializableBuffer;
+import io.airbyte.integrations.destination.redshift.enums.RedshiftDataTmpTableMode;
+import io.airbyte.integrations.destination.redshift.manifest.Entry;
+import io.airbyte.integrations.destination.redshift.manifest.Manifest;
+import io.airbyte.integrations.destination.s3.S3DestinationConfig;
+import io.airbyte.integrations.destination.s3.S3StorageOperations;
+import io.airbyte.integrations.destination.s3.credential.S3AccessKeyCredentialConfig;
+import io.airbyte.integrations.destination.staging.StagingOperations;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.UUID;
+import java.util.stream.Collectors;
+import org.joda.time.DateTime;
+
+public class RedshiftS3StagingSqlOperations extends RedshiftSqlOperations implements StagingOperations {
+
+  private final NamingConventionTransformer nameTransformer;
+  private final S3StorageOperations s3StorageOperations;
+  private final S3DestinationConfig s3Config;
+  private final ObjectMapper objectMapper;
+
+  public RedshiftS3StagingSqlOperations(NamingConventionTransformer nameTransformer,
+                                        AmazonS3 s3Client,
+                                        S3DestinationConfig s3Config,
+                                        RedshiftDataTmpTableMode redshiftDataTmpTableMode) {
+    super(redshiftDataTmpTableMode);
+    this.nameTransformer = nameTransformer;
+    this.s3StorageOperations = new S3StorageOperations(nameTransformer, s3Client, s3Config);
+    this.s3Config = s3Config;
+    this.objectMapper = new ObjectMapper();
+  }
+
+  @Override
+  public String getStageName(String namespace, String streamName) {
+    return nameTransformer.applyDefaultCase(String.join("_",
+        nameTransformer.convertStreamName(namespace),
+        nameTransformer.convertStreamName(streamName)));
+  }
+
+  @Override
+  public String getStagingPath(UUID connectionId, String namespace, String streamName, DateTime writeDatetime) {
+    return nameTransformer.applyDefaultCase(String.format("%s/%s_%02d_%02d_%02d_%s/",
+        getStageName(namespace, streamName),
+        writeDatetime.year().get(),
+        writeDatetime.monthOfYear().get(),
+        writeDatetime.dayOfMonth().get(),
+        writeDatetime.hourOfDay().get(),
+        connectionId));
+  }
+
+  @Override
+  public void createStageIfNotExists(JdbcDatabase database, String stageName) throws Exception {
+    AirbyteSentry.executeWithTracing("CreateStageIfNotExists",
+        () -> s3StorageOperations.createBucketObjectIfNotExists(stageName),
+        Map.of("stage", stageName));
+  }
+
+  @Override
+  public String uploadRecordsToStage(JdbcDatabase database, SerializableBuffer recordsData, String schemaName, String stageName, String stagingPath)
+      throws Exception {
+    return s3StorageOperations.uploadRecordsToBucket(recordsData, schemaName, stageName, stagingPath);
+  }
+
+  private String putManifest(final String manifestContents, String stagingPath) {
+    String manifestFilePath = stagingPath + String.format("%s.manifest", UUID.randomUUID());
+    AirbyteSentry.executeWithTracing("UploadManifest",
+        () -> s3StorageOperations.uploadManifest(s3Config.getBucketName(), manifestFilePath, manifestContents),
+        Map.of("stagingPath", stagingPath, "manifestPath", manifestFilePath));
+    return 
manifestFilePath;
+  }
+
+  @Override
+  public void copyIntoTmpTableFromStage(JdbcDatabase database,
+                                        String stageName,
+                                        String stagingPath,
+                                        List stagedFiles,
+                                        String dstTableName,
+                                        String schemaName)
+      throws Exception {
+    LOGGER.info("Starting copy to tmp table {} in destination from stage path {}, schema: {}.", dstTableName, stagingPath, schemaName);
+    final var possibleManifest = Optional.ofNullable(createManifest(stagedFiles, stagingPath));
+    AirbyteSentry.executeWithTracing("CopyIntoTableFromStage",
+        () -> Exceptions.toRuntime(() -> possibleManifest.stream()
+            .map(manifestContent -> putManifest(manifestContent, stagingPath))
+            .forEach(manifestPath -> executeCopy(manifestPath, database, schemaName, dstTableName))),
+        Map.of("schema", schemaName, "path", stagingPath, "table", dstTableName));
+    LOGGER.info("Copy to tmp table {}.{} in destination complete.", schemaName, dstTableName);
+  }
+
+  private void executeCopy(final String manifestPath, JdbcDatabase db, String schemaName, String tmpTableName) {
+    final S3AccessKeyCredentialConfig credentialConfig = (S3AccessKeyCredentialConfig) s3Config.getS3CredentialConfig();
+    final var copyQuery = String.format(
+        """
+        COPY %s.%s FROM '%s'
+        CREDENTIALS 'aws_access_key_id=%s;aws_secret_access_key=%s'
+        CSV GZIP
+        REGION '%s' TIMEFORMAT 'auto'
+        STATUPDATE OFF
+        MANIFEST;""",
+        schemaName,
+        tmpTableName,
+        getFullS3Path(s3Config.getBucketName(), manifestPath),
+        credentialConfig.getAccessKeyId(),
+        credentialConfig.getSecretAccessKey(),
+        s3Config.getBucketRegion());
+
+    Exceptions.toRuntime(() -> db.execute(copyQuery));
+  }
+
+  private String createManifest(List stagedFiles, String stagingPath) {
+    if (stagedFiles.isEmpty()) {
+      return null;
+    }
+
+    final var s3FileEntries = stagedFiles.stream()
+        .map(file -> new Entry(getManifestPath(s3Config.getBucketName(), file, stagingPath)))
+        .collect(Collectors.toList());
+    final var manifest = new Manifest(s3FileEntries);
+
+    return Exceptions.toRuntime(() -> objectMapper.writeValueAsString(manifest));
+  }
+
+  private static String getFullS3Path(final String s3BucketName, final String s3StagingFile) {
+    return String.join("/", "s3:/", s3BucketName, s3StagingFile);
+  }
+
+  private static String getManifestPath(final String s3BucketName, final String s3StagingFile, final String stagingPath) {
+    return "s3://" + s3BucketName + "/" + stagingPath + s3StagingFile;
+  }
+
+  @Override
+  public void cleanUpStage(JdbcDatabase database, String stageName, List stagedFiles) throws Exception {
+    AirbyteSentry.executeWithTracing("CleanStage",
+        () -> s3StorageOperations.cleanUpBucketObject(stageName, stagedFiles),
+        Map.of("stage", stageName));
+  }
+
+  @Override
+  public void dropStageIfExists(JdbcDatabase database, String stageName) throws Exception {
+    AirbyteSentry.executeWithTracing("DropStageIfExists",
+        () -> s3StorageOperations.dropBucketObject(stageName),
+        Map.of("stage", stageName));
+  }
+
+}
diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftSqlOperations.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftSqlOperations.java
similarity index 98%
rename from airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftSqlOperations.java
rename to 
airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftSqlOperations.java index df706ee9385e2..f567e338fc86e 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftSqlOperations.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftSqlOperations.java @@ -2,7 +2,7 @@ * Copyright (c) 2021 Airbyte, Inc., all rights reserved. */ -package io.airbyte.integrations.destination.redshift; +package io.airbyte.integrations.destination.redshift.operations; import static io.airbyte.db.jdbc.JdbcUtils.getDefaultSourceOperations; @@ -26,7 +26,7 @@ public class RedshiftSqlOperations extends JdbcSqlOperations { private static final Logger LOGGER = LoggerFactory.getLogger(RedshiftSqlOperations.class); - protected static final int REDSHIFT_VARCHAR_MAX_BYTE_SIZE = 65535; + public static final int REDSHIFT_VARCHAR_MAX_BYTE_SIZE = 65535; private static final String SELECT_ALL_TABLES_WITH_NOT_SUPER_TYPE_SQL_STATEMENT = """ select tablename, schemaname diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestinationAcceptanceTest.java index 0fc18225ad1f6..f8a8eef92a095 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestinationAcceptanceTest.java @@ -38,7 +38,7 @@ * Integration test testing the {@link RedshiftInsertDestination}. As the Redshift test credentials * contain S3 credentials by default, we remove these credentials. 
*/ -class RedshiftInsertDestinationAcceptanceTest extends RedshiftCopyDestinationAcceptanceTest { +class RedshiftInsertDestinationAcceptanceTest extends RedshiftStagingS3DestinationAcceptanceTest { public static final String DATASET_ID = Strings.addRandomSuffix("airbyte_tests", "_", 8); private static final String TYPE = "type"; diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftS3StagingInsertDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftS3StagingInsertDestinationAcceptanceTest.java index d540d18f95a1f..9956c32ab75fb 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftS3StagingInsertDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftS3StagingInsertDestinationAcceptanceTest.java @@ -33,7 +33,7 @@ import org.jooq.Result; import org.junit.jupiter.api.Test; -public class RedshiftS3StagingInsertDestinationAcceptanceTest extends RedshiftCopyDestinationAcceptanceTest { +public class RedshiftS3StagingInsertDestinationAcceptanceTest extends RedshiftStagingS3DestinationAcceptanceTest { public static final String DATASET_ID = Strings.addRandomSuffix("airbyte_tests", "_", 8); private static final String TYPE = "type"; diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3DestinationAcceptanceTest.java similarity index 94% rename from airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java rename to airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3DestinationAcceptanceTest.java index bd5ea0c3d2567..8176a1b5696e5 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3DestinationAcceptanceTest.java @@ -14,6 +14,7 @@ import io.airbyte.db.factory.DSLContextFactory; import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.integrations.base.JavaBaseConstants; +import io.airbyte.integrations.destination.redshift.operations.RedshiftSqlOperations; import io.airbyte.integrations.standardtest.destination.JdbcDestinationAcceptanceTest; import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.nio.file.Path; @@ -24,12 +25,12 @@ import org.slf4j.LoggerFactory; /** - * Integration test testing {@link RedshiftCopyS3Destination}. The default Redshift integration test + * Integration test testing {@link RedshiftStagingS3Destination}. The default Redshift integration test * credentials contain S3 credentials - this automatically causes COPY to be selected. 
*/
-public class RedshiftCopyDestinationAcceptanceTest extends JdbcDestinationAcceptanceTest {
+public class RedshiftStagingS3DestinationAcceptanceTest extends JdbcDestinationAcceptanceTest {
 
-  private static final Logger LOGGER = LoggerFactory.getLogger(RedshiftCopyDestinationAcceptanceTest.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(RedshiftStagingS3DestinationAcceptanceTest.class);
 
   // config from which to create / delete schemas.
   private JsonNode baseConfig;
diff --git a/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopierTest.java b/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/copiers/RedshiftStreamCopierTest.java
similarity index 99%
rename from airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopierTest.java
rename to airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/copiers/RedshiftStreamCopierTest.java
index 8026a0e339f19..949c72268d5ad 100644
--- a/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopierTest.java
+++ b/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/copiers/RedshiftStreamCopierTest.java
@@ -2,7 +2,7 @@
  * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
  */
 
-package io.airbyte.integrations.destination.redshift;
+package io.airbyte.integrations.destination.redshift.copiers;
 
 import static java.util.Comparator.comparing;
 import static org.mockito.ArgumentMatchers.argThat;
diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3StorageOperations.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3StorageOperations.java
index 7690e2bd8f8ef..e2c3c3a553d5c 100644
--- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3StorageOperations.java
+++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3StorageOperations.java
@@ -290,7 +290,11 @@ public boolean isValidData(final JsonNode jsonNode) {
   protected Map<String, String> getMetadataMapping() {
     return ImmutableMap.of(
         AesCbcEnvelopeEncryptionBlobDecorator.ENCRYPTED_CONTENT_ENCRYPTING_KEY, "x-amz-key",
-        AesCbcEnvelopeEncryptionBlobDecorator.INITIALIZATION_VECTOR, "x-amz-iv"
-    );
+        AesCbcEnvelopeEncryptionBlobDecorator.INITIALIZATION_VECTOR, "x-amz-iv");
   }
+
+  public void uploadManifest(String bucketName, String manifestFilePath, String manifestContents) {
+    s3Client.putObject(bucketName, manifestFilePath, manifestContents);
+  }
+
 }
diff --git a/docs/integrations/destinations/redshift.md b/docs/integrations/destinations/redshift.md
index 6dd424fdaa3db..619c83c738a73 100644
--- a/docs/integrations/destinations/redshift.md
+++ b/docs/integrations/destinations/redshift.md
@@ -138,6 +138,7 @@ Each stream will be output into its own raw table in Redshift. 
Each table will c | Version | Date | Pull Request | Subject | |:--------|:-----------| :----- | :------ | +| 0.3.33 | 2022-05-04 | [12601](https://github.com/airbytehq/airbyte/pull/12601) | Apply buffering strategy for S3 staging | | 0.3.32 | 2022-04-20 | [12085](https://github.com/airbytehq/airbyte/pull/12085) | Fixed bug with switching between INSERT and COPY config | | 0.3.31 | 2022-04-19 | [\#12064](https://github.com/airbytehq/airbyte/pull/12064) | Added option to support SUPER datatype in _airbyte_raw_** table | | 0.3.29 | 2022-04-05 | [11729](https://github.com/airbytehq/airbyte/pull/11729) | Fixed bug with dashes in schema name | | From c69423b212e0d562daf53ce5e483984867f48758 Mon Sep 17 00:00:00 2001 From: Peter Hu Date: Thu, 12 May 2022 02:36:13 -0700 Subject: [PATCH 35/55] Parameterize jdk and nginx base images for better M1 support (#11262) Currently on M1 Macs, switching between building for arm vs. amd64 architectures is a bit cumbersome because some of the base Docker images have not been parameterized yet, so you will run into build errors unless you untag those base images every time you switch between architectures. This PR should allow you to switch freely between the two without needing that manual step. This PR also adds a single env var BUILD_ARCH that can be used to switch between building for arm vs. amd64. With this PR we can build and push images for individual platform components, which is much faster than trying to redeploy everything when iterating on changes that are limited to only a few components. Ideally we'd have a GitHub Action that allowed us to deploy individual platform components, but until that exists this seems like a reasonable solution for faster iteration. --- airbyte-bootloader/Dockerfile | 3 ++- airbyte-container-orchestrator/Dockerfile | 3 ++- airbyte-metrics/reporter/Dockerfile | 3 ++- airbyte-scheduler/app/Dockerfile | 3 ++- airbyte-server/Dockerfile | 3 ++- airbyte-webapp/Dockerfile | 3 ++- airbyte-workers/Dockerfile | 3 ++- build.gradle | 6 +++++- 8 files changed, 19 insertions(+), 8 deletions(-) diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index f4d862fdb6f01..e7c0172aac459 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -1,5 +1,6 @@ ARG JDK_VERSION=17.0.1 -FROM openjdk:${JDK_VERSION}-slim +ARG JDK_IMAGE=openjdk:${JDK_VERSION}-slim +FROM ${JDK_IMAGE} ARG VERSION=0.38.2-alpha diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index 20ca9ac800af7..2c64bf90ffc17 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -1,5 +1,6 @@ ARG JDK_VERSION=17.0.1 -FROM openjdk:${JDK_VERSION}-slim AS sync-attempt +ARG JDK_IMAGE=openjdk:${JDK_VERSION}-slim +FROM ${JDK_IMAGE} AS sync-attempt ARG DOCKER_BUILD_ARCH=amd64 diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index 6b508f3ab802d..a1f99b76d276b 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -1,5 +1,6 @@ ARG JDK_VERSION=17.0.1 -FROM openjdk:${JDK_VERSION}-slim AS metrics-reporter +ARG JDK_IMAGE=openjdk:${JDK_VERSION}-slim +FROM ${JDK_IMAGE} AS metrics-reporter ARG VERSION=0.38.2-alpha diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile index 317910cf6a5e6..c37cf2f8706b2 100644 --- a/airbyte-scheduler/app/Dockerfile +++ b/airbyte-scheduler/app/Dockerfile @@ -1,5 +1,6 @@ ARG JDK_VERSION=17.0.1 -FROM openjdk:${JDK_VERSION}-slim AS
scheduler +ARG JDK_IMAGE=openjdk:${JDK_VERSION}-slim +FROM ${JDK_IMAGE} AS scheduler ARG VERSION=0.38.2-alpha diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index 0c8533002e1d7..0dffbd46aea22 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -1,5 +1,6 @@ ARG JDK_VERSION=17.0.1 -FROM openjdk:${JDK_VERSION}-slim AS server +ARG JDK_IMAGE=openjdk:${JDK_VERSION}-slim +FROM ${JDK_IMAGE} AS server EXPOSE 8000 diff --git a/airbyte-webapp/Dockerfile b/airbyte-webapp/Dockerfile index 3435c31534ed6..92941248d7743 100644 --- a/airbyte-webapp/Dockerfile +++ b/airbyte-webapp/Dockerfile @@ -1,4 +1,5 @@ -FROM nginx:1.19-alpine as webapp +ARG NGINX_IMAGE=nginx:1.19-alpine +FROM ${NGINX_IMAGE} as webapp EXPOSE 80 diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile index dd0908c01d856..8ce42ab96c2ca 100644 --- a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -1,5 +1,6 @@ ARG JDK_VERSION=17.0.1 -FROM openjdk:${JDK_VERSION}-slim AS worker +ARG JDK_IMAGE=openjdk:${JDK_VERSION}-slim +FROM ${JDK_IMAGE} AS worker ARG DOCKER_BUILD_ARCH=amd64 diff --git a/build.gradle b/build.gradle index 6f0c92de574fd..d50f722d60260 100644 --- a/build.gradle +++ b/build.gradle @@ -144,11 +144,13 @@ def Task getDockerBuildTask(String artifactName, String projectDir, String build return task ("buildDockerImage-$artifactName"(type: DockerBuildImage) { def jdkVersion = System.getenv('JDK_VERSION') ?: '17.0.1' - def arch = System.getProperty("os.arch").toLowerCase() + def arch = System.getenv('BUILD_ARCH') ?: System.getProperty("os.arch").toLowerCase() def isArm64 = arch == "aarch64" || arch == "arm64" def buildPlatform = System.getenv('DOCKER_BUILD_PLATFORM') ?: isArm64 ? 'linux/arm64' : 'linux/amd64' def alpineImage = System.getenv('ALPINE_IMAGE') ?: isArm64 ? 'arm64v8/alpine:3.14' : 'amd64/alpine:3.14' + def nginxImage = System.getenv('NGINX_IMAGE') ?: isArm64 ? 'arm64v8/nginx:1.19-alpine' : 'amd64/nginx:1.19-alpine' + def openjdkImage = System.getenv('JDK_IMAGE') ?: isArm64 ? "arm64v8/openjdk:${jdkVersion}-slim" : "amd64/openjdk:${jdkVersion}-slim" def buildArch = System.getenv('DOCKER_BUILD_ARCH') ?: isArm64 ? 
'arm64' : 'amd64' inputDir = file("$projectDir/build/docker") @@ -157,6 +159,8 @@ def Task getDockerBuildTask(String artifactName, String projectDir, String build buildArgs.put('JDK_VERSION', jdkVersion) buildArgs.put('DOCKER_BUILD_ARCH', buildArch) buildArgs.put('ALPINE_IMAGE', alpineImage) + buildArgs.put('NGINX_IMAGE', nginxImage) + buildArgs.put('JDK_IMAGE', openjdkImage) buildArgs.put('VERSION', buildVersion) }) } From 1c8b18890dc42383597754fdd4e5f1a115274ed9 Mon Sep 17 00:00:00 2001 From: Serhii Chvaliuk Date: Thu, 12 May 2022 12:50:43 +0300 Subject: [PATCH 36/55] Source Hubspot: ensure all oauth2.0 scopes in "check" command (#12711) Signed-off-by: Sergey Chvalyuk --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 2 +- .../connectors/source-hubspot/Dockerfile | 2 +- .../source-hubspot/source_hubspot/source.py | 43 ++++++++++++++++++- .../airbyte/oauth/flows/HubspotOAuthFlow.java | 5 ++- docs/integrations/sources/hubspot.md | 3 ++ 6 files changed, 51 insertions(+), 6 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 381cae9d39de6..a804d8e657c4a 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -390,7 +390,7 @@ - name: HubSpot sourceDefinitionId: 36c891d9-4bd9-43ac-bad2-10e12756272c dockerRepository: airbyte/source-hubspot - dockerImageTag: 0.1.58 + dockerImageTag: 0.1.59 documentationUrl: https://docs.airbyte.io/integrations/sources/hubspot icon: hubspot.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 82cf9c9a1420a..4f16ad96510bb 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -3536,7 +3536,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-hubspot:0.1.58" +- dockerImage: "airbyte/source-hubspot:0.1.59" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/hubspot" connectionSpecification: diff --git a/airbyte-integrations/connectors/source-hubspot/Dockerfile b/airbyte-integrations/connectors/source-hubspot/Dockerfile index 6743bbbf23d0d..8e61ae2aae279 100644 --- a/airbyte-integrations/connectors/source-hubspot/Dockerfile +++ b/airbyte-integrations/connectors/source-hubspot/Dockerfile @@ -34,5 +34,5 @@ COPY source_hubspot ./source_hubspot ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.58 +LABEL io.airbyte.version=0.1.59 LABEL io.airbyte.name=airbyte/source-hubspot diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/source.py b/airbyte-integrations/connectors/source-hubspot/source_hubspot/source.py index 4a321404950af..9a0b24916c050 100644 --- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/source.py +++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/source.py @@ -6,6 +6,7 @@ import logging from typing import Any, Iterator, List, Mapping, MutableMapping, Optional, Tuple +import requests from airbyte_cdk.models import AirbyteMessage, ConfiguredAirbyteCatalog from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.deprecated.base_source 
import ConfiguredAirbyteStream @@ -44,22 +45,60 @@ Workflows, ) +SCOPES = [ + "automation", + "content", + "crm.lists.read", + "crm.objects.companies.read", + "crm.objects.contacts.read", + "crm.objects.deals.read", + "crm.objects.feedback_submissions.read", + "crm.objects.owners.read", + "crm.schemas.companies.read", + "crm.schemas.contacts.read", + "crm.schemas.deals.read", + "e-commerce", + "files", + "files.ui_hidden.read", + "forms", + "forms-uploaded-files", + "sales-email-read", + "tickets", +] + class SourceHubspot(AbstractSource): def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> Tuple[bool, Optional[Any]]: """Check connection""" + common_params = self.get_common_params(config=config) + if common_params.get("authenticator"): + access_token = common_params["authenticator"].get_access_token() + url = f"https://api.hubapi.com/oauth/v1/access-tokens/{access_token}" + try: + response = requests.get(url=url) + response.raise_for_status() + return self.check_scopes(response.json()) + except Exception as e: + return False, repr(e) + alive = True error_msg = None - common_params = self.get_common_params(config=config) try: contacts = Contacts(**common_params) _ = contacts.properties except HTTPError as error: alive = False error_msg = repr(error) - return alive, error_msg + @staticmethod + def check_scopes(response_json): + granted_scopes = response_json["scopes"] + missed_scopes = set(SCOPES) - set(granted_scopes) + if missed_scopes: + return False, "missed required scopes: " + ", ".join(sorted(missed_scopes)) + return True, None + @staticmethod def get_api(config: Mapping[str, Any]) -> API: credentials = config.get("credentials", {}) diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/HubspotOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/HubspotOAuthFlow.java index 1effe63812506..90936bdd09d6a 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/HubspotOAuthFlow.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/HubspotOAuthFlow.java @@ -76,7 +76,10 @@ private String getScopes() { "crm.schemas.companies.read", "files", "forms-uploaded-files", - "files.ui_hidden.read"); + "files.ui_hidden.read", + "crm.objects.feedback_submissions.read", + "sales-email-read", + "automation"); } /** diff --git a/docs/integrations/sources/hubspot.md b/docs/integrations/sources/hubspot.md index 6268f44bd8193..9da35b236ced2 100644 --- a/docs/integrations/sources/hubspot.md +++ b/docs/integrations/sources/hubspot.md @@ -132,6 +132,8 @@ If you are using OAuth, most of the streams require the appropriate [scopes](htt | `deals` | `contacts` | | `email_events` | `content` | | `engagements` | `contacts` | +| `engagements_emails` | `sales-email-read` | +| `feedback_submissions` | `crm.objects.feedback_submissions.read` | | `forms` | `forms` | | `form_submissions`| `forms` | | `line_items` | `e-commerce` | @@ -147,6 +149,7 @@ If you are using OAuth, most of the streams require the appropriate [scopes](htt | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------| +| 0.1.59 | 2022-05-10 | [\#12711](https://github.com/airbytehq/airbyte/pull/12711) | Ensure oauth2.0 token has all needed scopes in "check" command | | 0.1.58 | 2022-05-04 | [\#12482](https://github.com/airbytehq/airbyte/pull/12482) | Update input configuration 
copy | | 0.1.57 | 2022-05-04 | [12198](https://github.com/airbytehq/airbyte/pull/12198) | Add deals associations for quotes | 0.1.56 | 2022-05-02 | [12515](https://github.com/airbytehq/airbyte/pull/12515) | Extra logs for troubleshooting 403 errors | From 2af780db3efbbf34e28935b95f725bcd850342e3 Mon Sep 17 00:00:00 2001 From: George Claireaux Date: Thu, 12 May 2022 11:08:52 +0100 Subject: [PATCH 37/55] base-java: Add utility for `AirbyteTraceMessage` and naively emit on any connector error (#12614) * added AirbyteLoggedException class * adding in int runr * changes * refactored to AirbyteTracedException to align with python impl. * added catch for Exceptions that are already AirbyteTracedException * refactor to static class & catch with UncaughtExceptionHandler * testing ExceptionHandler * add tests * added docs section on using AirbyteTraceMessageUtility * made AirbyteMessage maker methods more intuitive * fix spotbugs errors * format --- .../base/AirbyteExceptionHandler.java | 37 ++++++++++ .../base/AirbyteTraceMessageUtility.java | 67 +++++++++++++++++ .../integrations/base/IntegrationRunner.java | 2 + .../base/AirbyteExceptionHandlerTest.java | 73 +++++++++++++++++++ .../base/AirbyteTraceMessageUtilityTest.java | 59 +++++++++++++++ .../tutorials/building-a-java-destination.md | 32 ++++++++ 6 files changed, 270 insertions(+) create mode 100644 airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/AirbyteExceptionHandler.java create mode 100644 airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/AirbyteTraceMessageUtility.java create mode 100644 airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/AirbyteExceptionHandlerTest.java create mode 100644 airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/AirbyteTraceMessageUtilityTest.java diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/AirbyteExceptionHandler.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/AirbyteExceptionHandler.java new file mode 100644 index 0000000000000..571d18ea81aa1 --- /dev/null +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/AirbyteExceptionHandler.java @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.base; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class AirbyteExceptionHandler implements Thread.UncaughtExceptionHandler { + + private static final Logger LOGGER = LoggerFactory.getLogger(AirbyteExceptionHandler.class); + public static final String logMessage = "Something went wrong in the connector. See the logs for more details."; + + @Override + public void uncaughtException(Thread t, Throwable e) { + // This is a naive AirbyteTraceMessage emission in order to emit one when any error occurs in a + // connector. + // If a connector implements AirbyteTraceMessage emission itself, this code will result in an + // additional one being emitted. + // this is fine tho because: + // "The earliest AirbyteTraceMessage where type=error will be used to populate the FailureReason for + // the sync." 
+ // from the spec: + // https://docs.google.com/document/d/1ctrj3Yh_GjtQ93aND-WH3ocqGxsmxyC3jfiarrF6NY0/edit# + LOGGER.error(logMessage, e); + AirbyteTraceMessageUtility.emitSystemErrorTrace(e, logMessage); + terminate(); + } + + // by doing this in a separate method we can mock it to avoid closing the jvm and therefore test + // properly + protected void terminate() { + System.exit(0); + } + +} diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/AirbyteTraceMessageUtility.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/AirbyteTraceMessageUtility.java new file mode 100644 index 0000000000000..50f43a5330a95 --- /dev/null +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/AirbyteTraceMessageUtility.java @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.base; + +import io.airbyte.protocol.models.AirbyteErrorTraceMessage; +import io.airbyte.protocol.models.AirbyteErrorTraceMessage.FailureType; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteMessage.Type; +import io.airbyte.protocol.models.AirbyteTraceMessage; +import java.util.Arrays; +import java.util.function.Consumer; + +public final class AirbyteTraceMessageUtility { + + private AirbyteTraceMessageUtility() {} + + public static void emitSystemErrorTrace(final Throwable e, final String displayMessage) { + emitErrorTrace(e, displayMessage, FailureType.SYSTEM_ERROR); + } + + public static void emitConfigErrorTrace(final Throwable e, final String displayMessage) { + emitErrorTrace(e, displayMessage, FailureType.CONFIG_ERROR); + } + + public static void emitErrorTrace(final Throwable e, final String displayMessage, final FailureType failureType) { + emitMessage(makeErrorTraceAirbyteMessage(e, displayMessage, failureType)); + } + + // todo: handle the other types of trace message we'll expect in the future, see + // io.airbyte.protocol.models.AirbyteTraceMessage + // & the tech spec: + // https://docs.google.com/document/d/1ctrj3Yh_GjtQ93aND-WH3ocqGxsmxyC3jfiarrF6NY0/edit# + // public void emitNotificationTrace() {} + // public void emitMetricTrace() {} + + private static void emitMessage(AirbyteMessage message) { + // Not sure why defaultOutputRecordCollector is under Destination specifically, + // but this matches usage elsewhere in base-java + Consumer outputRecordCollector = Destination::defaultOutputRecordCollector; + outputRecordCollector.accept(message); + } + + private static AirbyteMessage makeErrorTraceAirbyteMessage( + final Throwable e, + final String displayMessage, + final FailureType failureType) { + + return makeAirbyteMessageFromTraceMessage( + makeAirbyteTraceMessage(AirbyteTraceMessage.Type.ERROR) + .withError(new AirbyteErrorTraceMessage() + .withFailureType(failureType) + .withMessage(displayMessage) + .withInternalMessage(e.toString()) + .withStackTrace(Arrays.toString(e.getStackTrace())))); + } + + private static AirbyteMessage makeAirbyteMessageFromTraceMessage(AirbyteTraceMessage airbyteTraceMessage) { + return new AirbyteMessage().withType(Type.TRACE).withTrace(airbyteTraceMessage); + } + + private static AirbyteTraceMessage makeAirbyteTraceMessage(final AirbyteTraceMessage.Type traceMessageType) { + return new AirbyteTraceMessage().withType(traceMessageType).withEmittedAt((double) System.currentTimeMillis()); + } + +} diff --git 
a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/IntegrationRunner.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/IntegrationRunner.java index 56a120537d4b3..8422eae5d9efc 100644 --- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/IntegrationRunner.java +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/IntegrationRunner.java @@ -86,6 +86,8 @@ public IntegrationRunner(final Source source) { this.source = source; this.destination = destination; validator = new JsonSchemaValidator(); + + Thread.setDefaultUncaughtExceptionHandler(new AirbyteExceptionHandler()); } @VisibleForTesting diff --git a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/AirbyteExceptionHandlerTest.java b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/AirbyteExceptionHandlerTest.java new file mode 100644 index 0000000000000..2beeab4aaefe1 --- /dev/null +++ b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/AirbyteExceptionHandlerTest.java @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.base; + +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.spy; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.nio.charset.StandardCharsets; +import lombok.SneakyThrows; +import org.junit.After; +import org.junit.Before; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; +import org.slf4j.LoggerFactory; + +public class AirbyteExceptionHandlerTest { + + PrintStream originalOut = System.out; + private volatile ByteArrayOutputStream outContent = new ByteArrayOutputStream(); + + @Before + public void setUpOut() { + System.setOut(new PrintStream(outContent, true, StandardCharsets.UTF_8)); + } + + @Test + void testTraceMessageEmission() throws Exception { + // mocking terminate() method in AirbyteExceptionHandler, so we don't kill the JVM + AirbyteExceptionHandler airbyteExceptionHandler = spy(new AirbyteExceptionHandler()); + doNothing().when(airbyteExceptionHandler).terminate(); + + // have to spawn a new thread to test the uncaught exception handling, + // because junit catches any exceptions in main thread, i.e. 
they're not 'uncaught' + Thread thread = new Thread() { + + @SneakyThrows + public void run() { + setUpOut(); + final IntegrationRunner runner = Mockito.mock(IntegrationRunner.class); + doThrow(new RuntimeException("error")).when(runner).run(new String[] {"write"}); + runner.run(new String[] {"write"}); + } + + }; + thread.setUncaughtExceptionHandler(airbyteExceptionHandler); + thread.start(); + thread.join(); + System.out.flush(); + revertOut(); + + // now we turn the std out from the thread into json and check it's the expected TRACE message + JsonNode traceMsgJson = Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8)); + LoggerFactory.getLogger(AirbyteExceptionHandlerTest.class).debug(traceMsgJson.toString()); + Assertions.assertEquals("TRACE", traceMsgJson.get("type").asText()); + Assertions.assertEquals("ERROR", traceMsgJson.get("trace").get("type").asText()); + Assertions.assertEquals(AirbyteExceptionHandler.logMessage, traceMsgJson.get("trace").get("error").get("message").asText()); + Assertions.assertEquals("system_error", traceMsgJson.get("trace").get("error").get("failure_type").asText()); + } + + @After + public void revertOut() { + System.setOut(originalOut); + } + +} diff --git a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/AirbyteTraceMessageUtilityTest.java b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/AirbyteTraceMessageUtilityTest.java new file mode 100644 index 0000000000000..0763e79d2abf1 --- /dev/null +++ b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/AirbyteTraceMessageUtilityTest.java @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.base; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.protocol.models.AirbyteErrorTraceMessage.FailureType; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.nio.charset.StandardCharsets; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +public class AirbyteTraceMessageUtilityTest { + + PrintStream originalOut = System.out; + private final ByteArrayOutputStream outContent = new ByteArrayOutputStream(); + + @BeforeEach + public void setUpOut() { + System.setOut(new PrintStream(outContent, true, StandardCharsets.UTF_8)); + } + + private void assertJsonNodeIsTraceMessage(JsonNode jsonNode) { + // todo: this check could be better by actually trying to convert the JsonNode to an + // AirbyteTraceMessage instance + Assertions.assertEquals("TRACE", jsonNode.get("type").asText()); + Assertions.assertNotNull(jsonNode.get("trace")); + } + + @Test + void testEmitSystemErrorTrace() { + AirbyteTraceMessageUtility.emitSystemErrorTrace(Mockito.mock(RuntimeException.class), "this is a system error"); + assertJsonNodeIsTraceMessage(Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8))); + } + + @Test + void testEmitConfigErrorTrace() { + AirbyteTraceMessageUtility.emitConfigErrorTrace(Mockito.mock(RuntimeException.class), "this is a config error"); + assertJsonNodeIsTraceMessage(Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8))); + } + + @Test + void testEmitErrorTrace() { + AirbyteTraceMessageUtility.emitErrorTrace(Mockito.mock(RuntimeException.class), "this is an error", FailureType.SYSTEM_ERROR); + 
assertJsonNodeIsTraceMessage(Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8))); + } + + @AfterEach + public void revertOut() { + System.setOut(originalOut); + } + +} diff --git a/docs/connector-development/tutorials/building-a-java-destination.md b/docs/connector-development/tutorials/building-a-java-destination.md index 8361dd127fa03..d133edc2185d7 100644 --- a/docs/connector-development/tutorials/building-a-java-destination.md +++ b/docs/connector-development/tutorials/building-a-java-destination.md @@ -112,6 +112,38 @@ Note: Each time you make a change to your implementation you need to re-build th The nice thing about this approach is that you are running your destination exactly as it will be run by Airbyte. The tradeoff is that iteration is slightly slower, because you need to re-build the connector between each change. +#### Handling Exceptions + +In order to best propagate user-friendly error messages and log error information to the platform, the [Airbyte Protocol](../../understanding-airbyte/airbyte-specification.md#The Airbyte Protocol) implements AirbyteTraceMessage. + +We recommend using AirbyteTraceMessages for known errors, as in these cases you can likely offer the user a helpful message as to what went wrong and suggest how they can resolve it. + +Airbyte provides a static utility class, `io.airbyte.integrations.base.AirbyteTraceMessageUtility`, to give you a clear and straight-forward way to emit these AirbyteTraceMessages. Example usage: +```java +try { + // some connector code responsible for doing X +} +catch (ExceptionIndicatingIncorrectCredentials credErr) { + AirbyteTraceMessageUtility.emitConfigErrorTrace( + credErr, "Connector failed due to incorrect credentials while doing X. Please check your connection is using valid credentials.") + throw credErr +} +catch (ExceptionIndicatingKnownErrorY knownErr) { + AirbyteTraceMessageUtility.emitSystemErrorTrace( + knownErr, "Connector failed because of reason Y while doing X. Please check/do/make ... to resolve this.") + throw knownErr +} +catch (Exception e) { + AirbyteTraceMessageUtility.emitSystemErrorTrace( + e, "Connector failed while doing X. Possible reasons for this could be ...") + throw e +} +``` + +Note the two different error trace methods. +- Where possible `emitConfigErrorTrace` should be used when we are certain the issue arises from a problem with the user's input configuration, e.g. invalid credentials. +- For everything else or if unsure, use `emitSystemErrorTrace`. + ### Step 3: Implement `spec` Each destination contains a specification written in JsonSchema that describes its inputs. Defining the specification is a good place to start when developing your destination. Check out the documentation [here](https://json-schema.org/) to learn the syntax. Here's [an example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/destination-postgres/src/main/resources/spec.json) of what the `spec.json` looks like for the postgres destination. 
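As a quick illustration of the `spec` step described above, the sketch below shows the typical shape of a `spec()` implementation in a Java destination. It assumes the base-java helpers (`Jsons`, `MoreResources`) and the `BaseConnector` base class; the class name `MyDestination` is hypothetical, and `BaseConnector` already ships an equivalent default, so this is a minimal sketch rather than code any particular connector needs to copy.

```java
import io.airbyte.commons.json.Jsons;
import io.airbyte.commons.resources.MoreResources;
import io.airbyte.integrations.base.BaseConnector;
import io.airbyte.protocol.models.ConnectorSpecification;

// Minimal sketch, assuming base-java conventions; the class is left abstract
// so the remaining Destination methods can be elided here.
public abstract class MyDestination extends BaseConnector {

  @Override
  public ConnectorSpecification spec() throws Exception {
    // spec.json is bundled as a classpath resource alongside the connector code
    // and deserialized into the protocol model returned by the `spec` command.
    return Jsons.deserialize(MoreResources.readResource("spec.json"), ConnectorSpecification.class);
  }

}
```

In practice most Java connectors rely on the inherited default and only author the `spec.json` file itself, which is why this tutorial step focuses on writing the JsonSchema rather than the Java method.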
From 257c48f938f5c1eeee3d2bbefed28b07df278e94 Mon Sep 17 00:00:00 2001 From: oneshcheret <33333155+sashaNeshcheret@users.noreply.github.com> Date: Thu, 12 May 2022 14:30:32 +0300 Subject: [PATCH 38/55] AzureBlobStorage destination: enable DAT tests (#12764) --- ...obStorageCsvDestinationAcceptanceTest.java | 10 ++++++++ ...eBlobStorageDestinationAcceptanceTest.java | 25 ++++++++++++++++++- 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/airbyte-integrations/connectors/destination-azure-blob-storage/src/test-integration/java/io/airbyte/integrations/destination/azure_blob_storage/AzureBlobStorageCsvDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-azure-blob-storage/src/test-integration/java/io/airbyte/integrations/destination/azure_blob_storage/AzureBlobStorageCsvDestinationAcceptanceTest.java index 904d55ae44dc0..7c35f6dd30714 100644 --- a/airbyte-integrations/connectors/destination-azure-blob-storage/src/test-integration/java/io/airbyte/integrations/destination/azure_blob_storage/AzureBlobStorageCsvDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-azure-blob-storage/src/test-integration/java/io/airbyte/integrations/destination/azure_blob_storage/AzureBlobStorageCsvDestinationAcceptanceTest.java @@ -76,12 +76,22 @@ private static JsonNode getJsonNode(final Map input, final Map json.put(key, Boolean.valueOf(value)); case "integer" -> json.put(key, Integer.valueOf(value)); case "number" -> json.put(key, Double.valueOf(value)); + case "" -> addNoTypeValue(json, key, value); default -> json.put(key, value); } } return json; } + private static void addNoTypeValue(ObjectNode json, String key, String value) { + if (value != null && (value.matches("^\\[.*\\]$")) || value.matches("^\\{.*\\}$")) { + var newNode = Jsons.deserialize(value); + json.set(key, newNode); + } else { + json.put(key, value); + } + } + @Override protected List retrieveRecords(final TestDestinationEnv testEnv, final String streamName, diff --git a/airbyte-integrations/connectors/destination-azure-blob-storage/src/test-integration/java/io/airbyte/integrations/destination/azure_blob_storage/AzureBlobStorageDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-azure-blob-storage/src/test-integration/java/io/airbyte/integrations/destination/azure_blob_storage/AzureBlobStorageDestinationAcceptanceTest.java index 9226e1b0a0253..8b399fa072cb5 100644 --- a/airbyte-integrations/connectors/destination-azure-blob-storage/src/test-integration/java/io/airbyte/integrations/destination/azure_blob_storage/AzureBlobStorageDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-azure-blob-storage/src/test-integration/java/io/airbyte/integrations/destination/azure_blob_storage/AzureBlobStorageDestinationAcceptanceTest.java @@ -17,6 +17,8 @@ import io.airbyte.commons.jackson.MoreMappers; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; @@ -80,7 +82,8 @@ protected List getAppendBlobClient(final String streamName) th var blobItemList = StreamSupport.stream(containerClient.listBlobs().spliterator(), false) .collect(Collectors.toList()); var filteredBlobList = blobItemList.stream() - .filter(blob -> 
blob.getName().contains(streamName + "/")).collect(Collectors.toList()); + .filter(blob -> blob.getName().startsWith(streamName + "/")) + .toList(); if (!filteredBlobList.isEmpty()) { List clobClientList = new ArrayList<>(); filteredBlobList.forEach(blobItem -> { @@ -96,6 +99,26 @@ protected List getAppendBlobClient(final String streamName) th protected abstract JsonNode getFormatConfig(); + @Override + protected TestDataComparator getTestDataComparator() { + return new AdvancedTestDataComparator(); + } + + @Override + protected boolean supportBasicDataTypeTest() { + return true; + } + + @Override + protected boolean supportArrayDataTypeTest() { + return true; + } + + @Override + protected boolean supportObjectDataTypeTest() { + return true; + } + /** * This method does the following: *

<li>Construct the Azure Blob destination config.</li>
From 087f389d9e7b6229d912d9e1d463ed6774ca428f Mon Sep 17 00:00:00 2001 From: Baz Date: Thu, 12 May 2022 14:44:32 +0300 Subject: [PATCH 39/55] =?UTF-8?q?=F0=9F=8E=89=20Destination=20Google=20She?= =?UTF-8?q?ets:=20Support=20OAuth2.0=20=20(#12300)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../oauth/OAuthImplementationFactory.java | 11 ++----- .../DestinationGoogleSheetsOAuthFlow.java | 31 +++++++++++++++++++ .../DestinationGoogleSheetsOAuthFlowTest.java | 22 +++++++++++++ 3 files changed, 56 insertions(+), 8 deletions(-) create mode 100644 airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/DestinationGoogleSheetsOAuthFlow.java create mode 100644 airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/DestinationGoogleSheetsOAuthFlowTest.java diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java index 06723d5b0ef31..5d16b41cdc8d3 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java @@ -9,14 +9,8 @@ import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.oauth.flows.*; -import io.airbyte.oauth.flows.facebook.FacebookMarketingOAuthFlow; -import io.airbyte.oauth.flows.facebook.FacebookPagesOAuthFlow; -import io.airbyte.oauth.flows.facebook.InstagramOAuthFlow; -import io.airbyte.oauth.flows.google.GoogleAdsOAuthFlow; -import io.airbyte.oauth.flows.google.GoogleAnalyticsOAuthFlow; -import io.airbyte.oauth.flows.google.GoogleSearchConsoleOAuthFlow; -import io.airbyte.oauth.flows.google.GoogleSheetsOAuthFlow; -import io.airbyte.oauth.flows.google.YouTubeAnalyticsOAuthFlow; +import io.airbyte.oauth.flows.facebook.*; +import io.airbyte.oauth.flows.google.*; import java.net.http.HttpClient; import java.util.Map; @@ -63,6 +57,7 @@ public OAuthImplementationFactory(final ConfigRepository configRepository, final .put("airbyte/source-shopify", new ShopifyOAuthFlow(configRepository, httpClient)) .put("airbyte/source-tiktok-marketing", new TikTokMarketingOAuthFlow(configRepository, httpClient)) .put("airbyte/destination-snowflake", new DestinationSnowflakeOAuthFlow(configRepository, httpClient)) + .put("airbyte/destination-google-sheets", new DestinationGoogleSheetsOAuthFlow(configRepository, httpClient)) .put("airbyte/source-snowflake", new SourceSnowflakeOAuthFlow(configRepository, httpClient)) .build(); } diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/DestinationGoogleSheetsOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/DestinationGoogleSheetsOAuthFlow.java new file mode 100644 index 0000000000000..da61dc4e96542 --- /dev/null +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/DestinationGoogleSheetsOAuthFlow.java @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */ + +package io.airbyte.oauth.flows.google; + +import com.google.common.annotations.VisibleForTesting; +import io.airbyte.config.persistence.ConfigRepository; +import java.net.http.HttpClient; +import java.util.function.Supplier; + +public class DestinationGoogleSheetsOAuthFlow extends GoogleOAuthFlow { + + @VisibleForTesting + static final String SCOPE_URL = "https://www.googleapis.com/auth/spreadsheets https://www.googleapis.com/auth/drive"; + + public DestinationGoogleSheetsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { + super(configRepository, httpClient); + } + + @VisibleForTesting + DestinationGoogleSheetsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { + super(configRepository, httpClient, stateSupplier); + } + + @Override + protected String getScope() { + return SCOPE_URL; + } + +} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/DestinationGoogleSheetsOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/DestinationGoogleSheetsOAuthFlowTest.java new file mode 100644 index 0000000000000..f111f0e8e9e90 --- /dev/null +++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/DestinationGoogleSheetsOAuthFlowTest.java @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.oauth.flows.google; + +import io.airbyte.oauth.BaseOAuthFlow; +import io.airbyte.oauth.flows.BaseOAuthFlowTest; + +public class DestinationGoogleSheetsOAuthFlowTest extends BaseOAuthFlowTest { + + @Override + protected BaseOAuthFlow getOAuthFlow() { + return new DestinationGoogleSheetsOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); + } + + @Override + protected String getExpectedConsentUrl() { + return "https://accounts.google.com/o/oauth2/v2/auth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fspreadsheets+https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fdrive&access_type=offline&state=state&include_granted_scopes=true&prompt=consent"; + } + +} From cbcbab4a2399c08d8f66d1b693ac824c245ba3da Mon Sep 17 00:00:00 2001 From: Harshith Mullapudi Date: Thu, 12 May 2022 18:06:03 +0530 Subject: [PATCH 40/55] feat: added sorting for name, connector and lastsync field (#12302) * feat: added sorting for name, connector and lastsync field * fix: remove console.log --- .../EntityTable/ConnectionTable.tsx | 18 +++- .../EntityTable/ImplementationTable.tsx | 85 +++++++++++++++++-- 2 files changed, 94 insertions(+), 9 deletions(-) diff --git a/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx b/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx index 573c6dca7b4c0..fabca8270de50 100644 --- a/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx +++ b/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx @@ -57,7 +57,12 @@ const ConnectionTable: React.FC = ({ data, entity, onClickRow, onChangeS const sortData = useCallback( (a, b) => { - const result = a[`${sortBy}Name`].toLowerCase().localeCompare(b[`${sortBy}Name`].toLowerCase()); + let result; + if (sortBy === "lastSync") { + result = b[sortBy] - a[sortBy]; + } else { + result = a[`${sortBy}Name`].toLowerCase().localeCompare(b[`${sortBy}Name`].toLowerCase()); + } if (sortOrder === SortOrderEnum.DESC) { return -1 * result; @@ -129,7 +134,16 @@ const ConnectionTable: React.FC = ({ data, entity, onClickRow, onChangeS ), }, { - Header: , + Header: 
( + <> + + onSortClick("lastSync")} + /> + + ), accessor: "lastSync", Cell: ({ cell, row }: CellProps) => ( diff --git a/airbyte-webapp/src/components/EntityTable/ImplementationTable.tsx b/airbyte-webapp/src/components/EntityTable/ImplementationTable.tsx index a914ae87a848c..1011a33fa3280 100644 --- a/airbyte-webapp/src/components/EntityTable/ImplementationTable.tsx +++ b/airbyte-webapp/src/components/EntityTable/ImplementationTable.tsx @@ -1,16 +1,20 @@ -import React from "react"; +import queryString from "query-string"; +import React, { useCallback } from "react"; import { FormattedMessage } from "react-intl"; import { CellProps } from "react-table"; import styled from "styled-components"; import Table from "components/Table"; +import useRouter from "hooks/useRouter"; + import AllConnectionsStatusCell from "./components/AllConnectionsStatusCell"; import ConnectEntitiesCell from "./components/ConnectEntitiesCell"; import ConnectorCell from "./components/ConnectorCell"; import LastSyncCell from "./components/LastSyncCell"; import NameCell from "./components/NameCell"; -import { EntityTableDataItem } from "./types"; +import SortButton from "./components/SortButton"; +import { EntityTableDataItem, SortOrderEnum } from "./types"; const Content = styled.div` margin: 0 32px 0 27px; @@ -23,10 +27,59 @@ type IProps = { }; const ImplementationTable: React.FC = ({ data, entity, onClickRow }) => { + const { query, push } = useRouter(); + const sortBy = query.sortBy || "entity"; + const sortOrder = query.order || SortOrderEnum.ASC; + + const onSortClick = useCallback( + (field: string) => { + const order = + sortBy !== field ? SortOrderEnum.ASC : sortOrder === SortOrderEnum.ASC ? SortOrderEnum.DESC : SortOrderEnum.ASC; + push({ + search: queryString.stringify( + { + sortBy: field, + order: order, + }, + { skipNull: true } + ), + }); + }, + [push, sortBy, sortOrder] + ); + + const sortData = useCallback( + (a, b) => { + let result; + if (sortBy === "lastSync") { + result = b[sortBy] - a[sortBy]; + } else { + result = a[`${sortBy}Name`].toLowerCase().localeCompare(b[`${sortBy}Name`].toLowerCase()); + } + + if (sortOrder === SortOrderEnum.DESC) { + return -1 * result; + } + + return result; + }, + [sortBy, sortOrder] + ); + + const sortingData = React.useMemo(() => data.sort(sortData), [sortData, data]); const columns = React.useMemo( () => [ { - Header: , + Header: ( + <> + + onSortClick("entity")} + /> + + ), headerHighlighted: true, accessor: "entityName", customWidth: 40, @@ -35,7 +88,16 @@ const ImplementationTable: React.FC = ({ data, entity, onClickRow }) => ), }, { - Header: , + Header: ( + <> + + onSortClick("connector")} + /> + + ), accessor: "connectorName", Cell: ({ cell, row }: CellProps) => ( @@ -49,7 +111,16 @@ const ImplementationTable: React.FC = ({ data, entity, onClickRow }) => ), }, { - Header: , + Header: ( + <> + + onSortClick("lastSync")} + /> + + ), accessor: "lastSync", Cell: ({ cell, row }: CellProps) => ( @@ -62,12 +133,12 @@ const ImplementationTable: React.FC = ({ data, entity, onClickRow }) => Cell: ({ cell }: CellProps) => , }, ], - [entity] + [entity, onSortClick, sortBy, sortOrder] ); return ( - +
    ); }; From 440f3db8bbcd4d9d2378a52660f77a82ca1c3c07 Mon Sep 17 00:00:00 2001 From: George Claireaux Date: Thu, 12 May 2022 14:23:29 +0100 Subject: [PATCH 41/55] Destinations BigQuery & Snowflake: use latest base-java to emit AirbyteTraceMessage on errros (#12805) * bumping BQ and Snowflake dests to get AirbyteTraceMessage from base-java * add PR link to changelogs * auto-bump connector version * auto-bump connector version * auto-bump connector version Co-authored-by: Octavia Squidington III --- .../seed/destination_definitions.yaml | 6 +- .../resources/seed/destination_specs.yaml | 6 +- .../Dockerfile | 2 +- .../destination-bigquery/Dockerfile | 2 +- .../destination-snowflake/Dockerfile | 2 +- docs/integrations/destinations/bigquery.md | 130 +++++++++--------- docs/integrations/destinations/snowflake.md | 81 +++++------ 7 files changed, 116 insertions(+), 113 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index 8d80c6d16c037..4edc6ce194c51 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -27,7 +27,7 @@ - name: BigQuery destinationDefinitionId: 22f6c74f-5699-40ff-833c-4a879ea40133 dockerRepository: airbyte/destination-bigquery - dockerImageTag: 1.1.4 + dockerImageTag: 1.1.5 documentationUrl: https://docs.airbyte.io/integrations/destinations/bigquery icon: bigquery.svg resourceRequirements: @@ -40,7 +40,7 @@ - name: BigQuery (denormalized typed struct) destinationDefinitionId: 079d5540-f236-4294-ba7c-ade8fd918496 dockerRepository: airbyte/destination-bigquery-denormalized - dockerImageTag: 0.3.4 + dockerImageTag: 0.3.5 documentationUrl: https://docs.airbyte.io/integrations/destinations/bigquery icon: bigquery.svg resourceRequirements: @@ -257,7 +257,7 @@ - name: Snowflake destinationDefinitionId: 424892c4-daac-4491-b35d-c6688ba547ba dockerRepository: airbyte/destination-snowflake - dockerImageTag: 0.4.25 + dockerImageTag: 0.4.26 documentationUrl: https://docs.airbyte.io/integrations/destinations/snowflake icon: snowflake.svg resourceRequirements: diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index 3db14f30d966d..1609c343b9057 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -285,7 +285,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-bigquery:1.1.4" +- dockerImage: "airbyte/destination-bigquery:1.1.5" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" connectionSpecification: @@ -494,7 +494,7 @@ - "overwrite" - "append" - "append_dedup" -- dockerImage: "airbyte/destination-bigquery-denormalized:0.3.4" +- dockerImage: "airbyte/destination-bigquery-denormalized:0.3.5" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" connectionSpecification: @@ -4096,7 +4096,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-snowflake:0.4.25" +- dockerImage: "airbyte/destination-snowflake:0.4.26" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/snowflake" connectionSpecification: diff --git 
a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile index f17b973bb29c3..6afb73da7f18b 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile +++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile @@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.3.4 +LABEL io.airbyte.version=0.3.5 LABEL io.airbyte.name=airbyte/destination-bigquery-denormalized diff --git a/airbyte-integrations/connectors/destination-bigquery/Dockerfile b/airbyte-integrations/connectors/destination-bigquery/Dockerfile index 43ee235894119..c5639583f7a6d 100644 --- a/airbyte-integrations/connectors/destination-bigquery/Dockerfile +++ b/airbyte-integrations/connectors/destination-bigquery/Dockerfile @@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=1.1.4 +LABEL io.airbyte.version=1.1.5 LABEL io.airbyte.name=airbyte/destination-bigquery diff --git a/airbyte-integrations/connectors/destination-snowflake/Dockerfile b/airbyte-integrations/connectors/destination-snowflake/Dockerfile index a187a9d25888d..19e8d1fa1f684 100644 --- a/airbyte-integrations/connectors/destination-snowflake/Dockerfile +++ b/airbyte-integrations/connectors/destination-snowflake/Dockerfile @@ -20,5 +20,5 @@ RUN tar xf ${APPLICATION}.tar --strip-components=1 ENV ENABLE_SENTRY true -LABEL io.airbyte.version=0.4.25 +LABEL io.airbyte.version=0.4.26 LABEL io.airbyte.name=airbyte/destination-snowflake diff --git a/docs/integrations/destinations/bigquery.md b/docs/integrations/destinations/bigquery.md index 636f0319f1233..816a2231fbd7e 100644 --- a/docs/integrations/destinations/bigquery.md +++ b/docs/integrations/destinations/bigquery.md @@ -207,72 +207,74 @@ This uploads data directly from your source to BigQuery. While this is faster to ### bigquery -| Version | Date | Pull Request | Subject | -|:--------|:-----------| :--- |:--------------------------------------------------------------------------------------------| -| 1.1.4 | 2022-05-04 | [12578](https://github.com/airbytehq/airbyte/pull/12578) | In JSON to Avro conversion, log JSON field values that do not follow Avro schema for debugging. | -| 1.1.3 | 2022-05-02 | [12528](https://github.com/airbytehq/airbyte/pull/12528) | Update Dataset location field description | -| 1.1.2 | 2022-04-29 | [12477](https://github.com/airbytehq/airbyte/pull/12477) | Dataset location is a required field | -| 1.1.1 | 2022-04-15 | [12068](https://github.com/airbytehq/airbyte/pull/12068) | Fixed bug with GCS bucket conditional binding | -| 1.1.0 | 2022-04-06 | [11776](https://github.com/airbytehq/airbyte/pull/11776) | Use serialized buffering strategy to reduce memory consumption. 
| -| 1.0.2 | 2022-03-30 | [11620](https://github.com/airbytehq/airbyte/pull/11620) | Updated spec | -| 1.0.1 | 2022-03-24 | [11350](https://github.com/airbytehq/airbyte/pull/11350) | Improve check performance | -| 1.0.0 | 2022-03-18 | [11238](https://github.com/airbytehq/airbyte/pull/11238) | Updated spec and documentation | -| 0.6.12 | 2022-03-18 | [10793](https://github.com/airbytehq/airbyte/pull/10793) | Fix namespace with invalid characters | -| 0.6.11 | 2022-03-03 | [10755](https://github.com/airbytehq/airbyte/pull/10755) | Make sure to kill children threads and stop JVM | -| 0.6.8 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | -| 0.6.6 | 2022-02-01 | [\#9959](https://github.com/airbytehq/airbyte/pull/9959) | Fix null pointer exception from buffered stream consumer. | -| 0.6.6 | 2022-01-29 | [\#9745](https://github.com/airbytehq/airbyte/pull/9745) | Integrate with Sentry. | -| 0.6.5 | 2022-01-18 | [\#9573](https://github.com/airbytehq/airbyte/pull/9573) | BigQuery Destination : update description for some input fields | -| 0.6.4 | 2022-01-17 | [\#8383](https://github.com/airbytehq/airbyte/issues/8383) | Support dataset-id prefixed by project-id | -| 0.6.3 | 2022-01-12 | [\#9415](https://github.com/airbytehq/airbyte/pull/9415) | BigQuery Destination : Fix GCS processing of Facebook data | -| 0.6.2 | 2022-01-10 | [\#9121](https://github.com/airbytehq/airbyte/pull/9121) | Fixed check method for GCS mode to verify if all roles assigned to user | -| 0.6.1 | 2021-12-22 | [\#9039](https://github.com/airbytehq/airbyte/pull/9039) | Added part_size configuration to UI for GCS staging | -| 0.6.0 | 2021-12-17 | [\#8788](https://github.com/airbytehq/airbyte/issues/8788) | BigQuery/BiqQuery denorm Destinations : Add possibility to use different types of GCS files | -| 0.5.1 | 2021-12-16 | [\#8816](https://github.com/airbytehq/airbyte/issues/8816) | Update dataset locations | -| 0.5.0 | 2021-10-26 | [\#7240](https://github.com/airbytehq/airbyte/issues/7240) | Output partitioned/clustered tables | -| 0.4.1 | 2021-10-04 | [\#6733](https://github.com/airbytehq/airbyte/issues/6733) | Support dataset starting with numbers | -| 0.4.0 | 2021-08-26 | [\#5296](https://github.com/airbytehq/airbyte/issues/5296) | Added GCS Staging uploading option | -| 0.3.12 | 2021-08-03 | [\#3549](https://github.com/airbytehq/airbyte/issues/3549) | Add optional arg to make a possibility to change the BigQuery client's chunk\buffer size | -| 0.3.11 | 2021-07-30 | [\#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | -| 0.3.10 | 2021-07-28 | [\#3549](https://github.com/airbytehq/airbyte/issues/3549) | Add extended logs and made JobId filled with region and projectId | -| 0.3.9 | 2021-07-28 | [\#5026](https://github.com/airbytehq/airbyte/pull/5026) | Add sanitized json fields in raw tables to handle quotes in column names | -| 0.3.6 | 2021-06-18 | [\#3947](https://github.com/airbytehq/airbyte/issues/3947) | Service account credentials are now optional. 
| -| 0.3.4 | 2021-06-07 | [\#3277](https://github.com/airbytehq/airbyte/issues/3277) | Add dataset location option | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:-----------------------------------------------------------|:------------------------------------------------------------------------------------------------| +| 1.1.5 | 2022-05-12 | [12805](https://github.com/airbytehq/airbyte/pull/12805) | Updated to latest base-java to emit AirbyteTraceMessage on error. | +| 1.1.4 | 2022-05-04 | [12578](https://github.com/airbytehq/airbyte/pull/12578) | In JSON to Avro conversion, log JSON field values that do not follow Avro schema for debugging. | +| 1.1.3 | 2022-05-02 | [12528](https://github.com/airbytehq/airbyte/pull/12528) | Update Dataset location field description | +| 1.1.2 | 2022-04-29 | [12477](https://github.com/airbytehq/airbyte/pull/12477) | Dataset location is a required field | +| 1.1.1 | 2022-04-15 | [12068](https://github.com/airbytehq/airbyte/pull/12068) | Fixed bug with GCS bucket conditional binding | +| 1.1.0 | 2022-04-06 | [11776](https://github.com/airbytehq/airbyte/pull/11776) | Use serialized buffering strategy to reduce memory consumption. | +| 1.0.2 | 2022-03-30 | [11620](https://github.com/airbytehq/airbyte/pull/11620) | Updated spec | +| 1.0.1 | 2022-03-24 | [11350](https://github.com/airbytehq/airbyte/pull/11350) | Improve check performance | +| 1.0.0 | 2022-03-18 | [11238](https://github.com/airbytehq/airbyte/pull/11238) | Updated spec and documentation | +| 0.6.12 | 2022-03-18 | [10793](https://github.com/airbytehq/airbyte/pull/10793) | Fix namespace with invalid characters | +| 0.6.11 | 2022-03-03 | [10755](https://github.com/airbytehq/airbyte/pull/10755) | Make sure to kill children threads and stop JVM | +| 0.6.8 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | +| 0.6.6 | 2022-02-01 | [\#9959](https://github.com/airbytehq/airbyte/pull/9959) | Fix null pointer exception from buffered stream consumer. | +| 0.6.6 | 2022-01-29 | [\#9745](https://github.com/airbytehq/airbyte/pull/9745) | Integrate with Sentry. 
| +| 0.6.5 | 2022-01-18 | [\#9573](https://github.com/airbytehq/airbyte/pull/9573) | BigQuery Destination : update description for some input fields | +| 0.6.4 | 2022-01-17 | [\#8383](https://github.com/airbytehq/airbyte/issues/8383) | Support dataset-id prefixed by project-id | +| 0.6.3 | 2022-01-12 | [\#9415](https://github.com/airbytehq/airbyte/pull/9415) | BigQuery Destination : Fix GCS processing of Facebook data | +| 0.6.2 | 2022-01-10 | [\#9121](https://github.com/airbytehq/airbyte/pull/9121) | Fixed check method for GCS mode to verify if all roles assigned to user | +| 0.6.1 | 2021-12-22 | [\#9039](https://github.com/airbytehq/airbyte/pull/9039) | Added part_size configuration to UI for GCS staging | +| 0.6.0 | 2021-12-17 | [\#8788](https://github.com/airbytehq/airbyte/issues/8788) | BigQuery/BiqQuery denorm Destinations : Add possibility to use different types of GCS files | +| 0.5.1 | 2021-12-16 | [\#8816](https://github.com/airbytehq/airbyte/issues/8816) | Update dataset locations | +| 0.5.0 | 2021-10-26 | [\#7240](https://github.com/airbytehq/airbyte/issues/7240) | Output partitioned/clustered tables | +| 0.4.1 | 2021-10-04 | [\#6733](https://github.com/airbytehq/airbyte/issues/6733) | Support dataset starting with numbers | +| 0.4.0 | 2021-08-26 | [\#5296](https://github.com/airbytehq/airbyte/issues/5296) | Added GCS Staging uploading option | +| 0.3.12 | 2021-08-03 | [\#3549](https://github.com/airbytehq/airbyte/issues/3549) | Add optional arg to make a possibility to change the BigQuery client's chunk\buffer size | +| 0.3.11 | 2021-07-30 | [\#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | +| 0.3.10 | 2021-07-28 | [\#3549](https://github.com/airbytehq/airbyte/issues/3549) | Add extended logs and made JobId filled with region and projectId | +| 0.3.9 | 2021-07-28 | [\#5026](https://github.com/airbytehq/airbyte/pull/5026) | Add sanitized json fields in raw tables to handle quotes in column names | +| 0.3.6 | 2021-06-18 | [\#3947](https://github.com/airbytehq/airbyte/issues/3947) | Service account credentials are now optional. | +| 0.3.4 | 2021-06-07 | [\#3277](https://github.com/airbytehq/airbyte/issues/3277) | Add dataset location option | ### bigquery-denormalized -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:-----------------------------------------------------------| :--- | -| 1.1.4 | 2022-05-04 | [12578](https://github.com/airbytehq/airbyte/pull/12578) | In JSON to Avro conversion, log JSON field values that do not follow Avro schema for debugging. | -| 0.3.3 | 2022-05-02 | [12528](https://github.com/airbytehq/airbyte/pull/12528) | Update Dataset location field description | -| 0.3.2 | 2022-04-29 | [12477](https://github.com/airbytehq/airbyte/pull/12477) | Dataset location is a required field | -| 0.3.1 | 2022-04-15 | [11978](https://github.com/airbytehq/airbyte/pull/11978) | Fixed emittedAt timestamp. | -| 0.3.0 | 2022-04-06 | [11776](https://github.com/airbytehq/airbyte/pull/11776) | Use serialized buffering strategy to reduce memory consumption. 
| -| 0.2.15 | 2022-04-05 | [11166](https://github.com/airbytehq/airbyte/pull/11166) | Fixed handling of anyOf and allOf fields | -| 0.2.14 | 2022-04-02 | [11620](https://github.com/airbytehq/airbyte/pull/11620) | Updated spec | -| 0.2.13 | 2022-04-01 | [11636](https://github.com/airbytehq/airbyte/pull/11636) | Added new unit tests | -| 0.2.12 | 2022-03-28 | [11454](https://github.com/airbytehq/airbyte/pull/11454) | Integration test enhancement for picking test-data and schemas | -| 0.2.11 | 2022-03-18 | [10793](https://github.com/airbytehq/airbyte/pull/10793) | Fix namespace with invalid characters | -| 0.2.10 | 2022-03-03 | [10755](https://github.com/airbytehq/airbyte/pull/10755) | Make sure to kill children threads and stop JVM | -| 0.2.8 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | -| 0.2.7 | 2022-02-01 | [\#9959](https://github.com/airbytehq/airbyte/pull/9959) | Fix null pointer exception from buffered stream consumer. | -| 0.2.6 | 2022-01-29 | [\#9745](https://github.com/airbytehq/airbyte/pull/9745) | Integrate with Sentry. | -| 0.2.5 | 2022-01-18 | [\#9573](https://github.com/airbytehq/airbyte/pull/9573) | BigQuery Destination : update description for some input fields | -| 0.2.4 | 2022-01-17 | [\#8383](https://github.com/airbytehq/airbyte/issues/8383) | BigQuery/BiqQuery denorm Destinations : Support dataset-id prefixed by project-id | -| 0.2.3 | 2022-01-12 | [\#9415](https://github.com/airbytehq/airbyte/pull/9415) | BigQuery Destination : Fix GCS processing of Facebook data | -| 0.2.2 | 2021-12-22 | [\#9039](https://github.com/airbytehq/airbyte/pull/9039) | Added part_size configuration to UI for GCS staging | -| 0.2.1 | 2021-12-21 | [\#8574](https://github.com/airbytehq/airbyte/pull/8574) | Added namespace to Avro and Parquet record types | -| 0.2.0 | 2021-12-17 | [\#8788](https://github.com/airbytehq/airbyte/pull/8788) | BigQuery/BiqQuery denorm Destinations : Add possibility to use different types of GCS files | -| 0.1.11 | 2021-12-16 | [\#8816](https://github.com/airbytehq/airbyte/issues/8816) | Update dataset locations | -| 0.1.10 | 2021-11-09 | [\#7804](https://github.com/airbytehq/airbyte/pull/7804) | handle null values in fields described by a $ref definition | -| 0.1.9 | 2021-11-08 | [\#7736](https://github.com/airbytehq/airbyte/issues/7736) | Fixed the handling of ObjectNodes with $ref definition key | -| 0.1.8 | 2021-10-27 | [\#7413](https://github.com/airbytehq/airbyte/issues/7413) | Fixed DATETIME conversion for BigQuery | -| 0.1.7 | 2021-10-26 | [\#7240](https://github.com/airbytehq/airbyte/issues/7240) | Output partitioned/clustered tables | -| 0.1.6 | 2021-09-16 | [\#6145](https://github.com/airbytehq/airbyte/pull/6145) | BigQuery Denormalized support for date, datetime & timestamp types through the json "format" key | -| 0.1.5 | 2021-09-07 | [\#5881](https://github.com/airbytehq/airbyte/pull/5881) | BigQuery Denormalized NPE fix | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:-----------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------| +| 0.3.5 | 2022-05-12 | [12805](https://github.com/airbytehq/airbyte/pull/12805) | Updated to latest base-java to emit AirbyteTraceMessage on error. 
| +| 0.3.4 | 2022-05-04 | [12578](https://github.com/airbytehq/airbyte/pull/12578) | In JSON to Avro conversion, log JSON field values that do not follow Avro schema for debugging. | +| 0.3.3 | 2022-05-02 | [12528](https://github.com/airbytehq/airbyte/pull/12528) | Update Dataset location field description | +| 0.3.2 | 2022-04-29 | [12477](https://github.com/airbytehq/airbyte/pull/12477) | Dataset location is a required field | +| 0.3.1 | 2022-04-15 | [11978](https://github.com/airbytehq/airbyte/pull/11978) | Fixed emittedAt timestamp. | +| 0.3.0 | 2022-04-06 | [11776](https://github.com/airbytehq/airbyte/pull/11776) | Use serialized buffering strategy to reduce memory consumption. | +| 0.2.15 | 2022-04-05 | [11166](https://github.com/airbytehq/airbyte/pull/11166) | Fixed handling of anyOf and allOf fields | +| 0.2.14 | 2022-04-02 | [11620](https://github.com/airbytehq/airbyte/pull/11620) | Updated spec | +| 0.2.13 | 2022-04-01 | [11636](https://github.com/airbytehq/airbyte/pull/11636) | Added new unit tests | +| 0.2.12 | 2022-03-28 | [11454](https://github.com/airbytehq/airbyte/pull/11454) | Integration test enhancement for picking test-data and schemas | +| 0.2.11 | 2022-03-18 | [10793](https://github.com/airbytehq/airbyte/pull/10793) | Fix namespace with invalid characters | +| 0.2.10 | 2022-03-03 | [10755](https://github.com/airbytehq/airbyte/pull/10755) | Make sure to kill children threads and stop JVM | +| 0.2.8 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | +| 0.2.7 | 2022-02-01 | [\#9959](https://github.com/airbytehq/airbyte/pull/9959) | Fix null pointer exception from buffered stream consumer. | +| 0.2.6 | 2022-01-29 | [\#9745](https://github.com/airbytehq/airbyte/pull/9745) | Integrate with Sentry. 
| +| 0.2.5 | 2022-01-18 | [\#9573](https://github.com/airbytehq/airbyte/pull/9573) | BigQuery Destination : update description for some input fields | +| 0.2.4 | 2022-01-17 | [\#8383](https://github.com/airbytehq/airbyte/issues/8383) | BigQuery/BiqQuery denorm Destinations : Support dataset-id prefixed by project-id | +| 0.2.3 | 2022-01-12 | [\#9415](https://github.com/airbytehq/airbyte/pull/9415) | BigQuery Destination : Fix GCS processing of Facebook data | +| 0.2.2 | 2021-12-22 | [\#9039](https://github.com/airbytehq/airbyte/pull/9039) | Added part_size configuration to UI for GCS staging | +| 0.2.1 | 2021-12-21 | [\#8574](https://github.com/airbytehq/airbyte/pull/8574) | Added namespace to Avro and Parquet record types | +| 0.2.0 | 2021-12-17 | [\#8788](https://github.com/airbytehq/airbyte/pull/8788) | BigQuery/BiqQuery denorm Destinations : Add possibility to use different types of GCS files | +| 0.1.11 | 2021-12-16 | [\#8816](https://github.com/airbytehq/airbyte/issues/8816) | Update dataset locations | +| 0.1.10 | 2021-11-09 | [\#7804](https://github.com/airbytehq/airbyte/pull/7804) | handle null values in fields described by a $ref definition | +| 0.1.9 | 2021-11-08 | [\#7736](https://github.com/airbytehq/airbyte/issues/7736) | Fixed the handling of ObjectNodes with $ref definition key | +| 0.1.8 | 2021-10-27 | [\#7413](https://github.com/airbytehq/airbyte/issues/7413) | Fixed DATETIME conversion for BigQuery | +| 0.1.7 | 2021-10-26 | [\#7240](https://github.com/airbytehq/airbyte/issues/7240) | Output partitioned/clustered tables | +| 0.1.6 | 2021-09-16 | [\#6145](https://github.com/airbytehq/airbyte/pull/6145) | BigQuery Denormalized support for date, datetime & timestamp types through the json "format" key | +| 0.1.5 | 2021-09-07 | [\#5881](https://github.com/airbytehq/airbyte/pull/5881) | BigQuery Denormalized NPE fix | | 0.1.4 | 2021-09-04 | [\#5813](https://github.com/airbytehq/airbyte/pull/5813) | fix Stackoverflow error when receive a schema from source where "Array" type doesn't contain a required "items" element | -| 0.1.3 | 2021-08-07 | [\#5261](https://github.com/airbytehq/airbyte/pull/5261) | 🐛 Destination BigQuery\(Denormalized\): Fix processing arrays of records | -| 0.1.2 | 2021-07-30 | [\#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | -| 0.1.1 | 2021-06-21 | [\#3555](https://github.com/airbytehq/airbyte/pull/3555) | Partial Success in BufferedStreamConsumer | -| 0.1.0 | 2021-06-21 | [\#4176](https://github.com/airbytehq/airbyte/pull/4176) | Destination using Typed Struct and Repeated fields | +| 0.1.3 | 2021-08-07 | [\#5261](https://github.com/airbytehq/airbyte/pull/5261) | 🐛 Destination BigQuery\(Denormalized\): Fix processing arrays of records | +| 0.1.2 | 2021-07-30 | [\#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | +| 0.1.1 | 2021-06-21 | [\#3555](https://github.com/airbytehq/airbyte/pull/3555) | Partial Success in BufferedStreamConsumer | +| 0.1.0 | 2021-06-21 | [\#4176](https://github.com/airbytehq/airbyte/pull/4176) | Destination using Typed Struct and Repeated fields | diff --git a/docs/integrations/destinations/snowflake.md b/docs/integrations/destinations/snowflake.md index e2a404ad55fa7..6752d0322d580 100644 --- a/docs/integrations/destinations/snowflake.md +++ b/docs/integrations/destinations/snowflake.md @@ -234,43 +234,44 @@ Now that you have set up the Snowflake destination connector, check out the foll ## Changelog -| Version | Date | Pull 
Request | Subject | -|:--------|:-----------| :----- | :------ | -| 0.4.25 | 2022-05-03 | [\#12452](https://github.com/airbytehq/airbyte/pull/12452) | Add support for encrypted staging on S3; fix the purge_staging_files option | -| 0.4.24 | 2022-03-24 | [\#11093](https://github.com/airbytehq/airbyte/pull/11093) | Added OAuth support (Compatible with Airbyte Version 0.35.60+)| -| 0.4.22 | 2022-03-18 | [\#10793](https://github.com/airbytehq/airbyte/pull/10793) | Fix namespace with invalid characters | -| 0.4.21 | 2022-03-18 | [\#11071](https://github.com/airbytehq/airbyte/pull/11071) | Switch to compressed on-disk buffering before staging to s3/internal stage | -| 0.4.20 | 2022-03-14 | [\#10341](https://github.com/airbytehq/airbyte/pull/10341) | Add Azure blob staging support | -| 0.4.19 | 2022-03-11 | [10699](https://github.com/airbytehq/airbyte/pull/10699) | Added unit tests | -| 0.4.17 | 2022-02-25 | [10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | -| 0.4.16 | 2022-02-25 | [\#10627](https://github.com/airbytehq/airbyte/pull/10627) | Add try catch to make sure all handlers are closed | -| 0.4.15 | 2022-02-22 | [\#10459](https://github.com/airbytehq/airbyte/pull/10459) | Add FailureTrackingAirbyteMessageConsumer | -| 0.4.14 | 2022-02-17 | [\#10394](https://github.com/airbytehq/airbyte/pull/10394) | Reduce memory footprint. | -| 0.4.13 | 2022-02-16 | [\#10212](https://github.com/airbytehq/airbyte/pull/10212) | Execute COPY command in parallel for S3 and GCS staging | -| 0.4.12 | 2022-02-15 | [\#10342](https://github.com/airbytehq/airbyte/pull/10342) | Use connection pool, and fix connection leak. | -| 0.4.11 | 2022-02-14 | [\#9920](https://github.com/airbytehq/airbyte/pull/9920) | Updated the size of staging files for S3 staging. Also, added closure of S3 writers to staging files when data has been written to an staging file. | -| 0.4.10 | 2022-02-14 | [\#10297](https://github.com/airbytehq/airbyte/pull/10297) | Halve the record buffer size to reduce memory consumption. | -| 0.4.9 | 2022-02-14 | [\#10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `ExitOnOutOfMemoryError` JVM flag. | -| 0.4.8 | 2022-02-01 | [\#9959](https://github.com/airbytehq/airbyte/pull/9959) | Fix null pointer exception from buffered stream consumer. | -| 0.4.7 | 2022-01-29 | [\#9745](https://github.com/airbytehq/airbyte/pull/9745) | Integrate with Sentry. 
| -| 0.4.6 | 2022-01-28 | [#9623](https://github.com/airbytehq/airbyte/pull/9623) | Add jdbc_url_params support for optional JDBC parameters | -| 0.4.5 | 2021-12-29 | [#9184](https://github.com/airbytehq/airbyte/pull/9184) | Update connector fields title/description | -| 0.4.4 | 2022-01-24 | [#9743](https://github.com/airbytehq/airbyte/pull/9743) | Fixed bug with dashes in schema name | -| 0.4.3 | 2022-01-20 | [#9531](https://github.com/airbytehq/airbyte/pull/9531) | Start using new S3StreamCopier and expose the purgeStagingData option | -| 0.4.2 | 2022-01-10 | [#9141](https://github.com/airbytehq/airbyte/pull/9141) | Fixed duplicate rows on retries | -| 0.4.1 | 2021-01-06 | [#9311](https://github.com/airbytehq/airbyte/pull/9311) | Update сreating schema during check | -| 0.4.0 | 2021-12-27 | [#9063](https://github.com/airbytehq/airbyte/pull/9063) | Updated normalization to produce permanent tables | -| 0.3.24 | 2021-12-23 | [#8869](https://github.com/airbytehq/airbyte/pull/8869) | Changed staging approach to Byte-Buffered | -| 0.3.23 | 2021-12-22 | [#9039](https://github.com/airbytehq/airbyte/pull/9039) | Added part_size configuration in UI for S3 loading method | -| 0.3.22 | 2021-12-21 | [#9006](https://github.com/airbytehq/airbyte/pull/9006) | Updated jdbc schema naming to follow Snowflake Naming Conventions | -| 0.3.21 | 2021-12-15 | [#8781](https://github.com/airbytehq/airbyte/pull/8781) | Updated check method to verify permissions to create/drop stage for internal staging; compatibility fix for Java 17 | -| 0.3.20 | 2021-12-10 | [#8562](https://github.com/airbytehq/airbyte/pull/8562) | Moving classes around for better dependency management; compatibility fix for Java 17 | -| 0.3.19 | 2021-12-06 | [#8528](https://github.com/airbytehq/airbyte/pull/8528) | Set Internal Staging as default choice | -| 0.3.18 | 2021-11-26 | [#8253](https://github.com/airbytehq/airbyte/pull/8253) | Snowflake Internal Staging Support | -| 0.3.17 | 2021-11-08 | [#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | -| 0.3.15 | 2021-10-11 | [#6949](https://github.com/airbytehq/airbyte/pull/6949) | Each stream was split into files of 10,000 records each for copying using S3 or GCS | -| 0.3.14 | 2021-09-08 | [#5924](https://github.com/airbytehq/airbyte/pull/5924) | Fixed AWS S3 Staging COPY is writing records from different table in the same raw table | -| 0.3.13 | 2021-09-01 | [#5784](https://github.com/airbytehq/airbyte/pull/5784) | Updated query timeout from 30 minutes to 3 hours | -| 0.3.12 | 2021-07-30 | [#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | -| 0.3.11 | 2021-07-21 | [#3555](https://github.com/airbytehq/airbyte/pull/3555) | Partial Success in BufferedStreamConsumer | -| 0.3.10 | 2021-07-12 | [#4713](https://github.com/airbytehq/airbyte/pull/4713)| Tag traffic with `airbyte` label to enable optimization opportunities from Snowflake | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:-----------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------| +| 0.4.26 | 2022-05-12 | [\#12805](https://github.com/airbytehq/airbyte/pull/12805) | Updated to latest base-java to emit AirbyteTraceMessages on error. 
| +| 0.4.25 | 2022-05-03 | [\#12452](https://github.com/airbytehq/airbyte/pull/12452) | Add support for encrypted staging on S3; fix the purge_staging_files option | +| 0.4.24 | 2022-03-24 | [\#11093](https://github.com/airbytehq/airbyte/pull/11093) | Added OAuth support (Compatible with Airbyte Version 0.35.60+) | +| 0.4.22 | 2022-03-18 | [\#10793](https://github.com/airbytehq/airbyte/pull/10793) | Fix namespace with invalid characters | +| 0.4.21 | 2022-03-18 | [\#11071](https://github.com/airbytehq/airbyte/pull/11071) | Switch to compressed on-disk buffering before staging to s3/internal stage | +| 0.4.20 | 2022-03-14 | [\#10341](https://github.com/airbytehq/airbyte/pull/10341) | Add Azure blob staging support | +| 0.4.19 | 2022-03-11 | [10699](https://github.com/airbytehq/airbyte/pull/10699) | Added unit tests | +| 0.4.17 | 2022-02-25 | [10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | +| 0.4.16 | 2022-02-25 | [\#10627](https://github.com/airbytehq/airbyte/pull/10627) | Add try catch to make sure all handlers are closed | +| 0.4.15 | 2022-02-22 | [\#10459](https://github.com/airbytehq/airbyte/pull/10459) | Add FailureTrackingAirbyteMessageConsumer | +| 0.4.14 | 2022-02-17 | [\#10394](https://github.com/airbytehq/airbyte/pull/10394) | Reduce memory footprint. | +| 0.4.13 | 2022-02-16 | [\#10212](https://github.com/airbytehq/airbyte/pull/10212) | Execute COPY command in parallel for S3 and GCS staging | +| 0.4.12 | 2022-02-15 | [\#10342](https://github.com/airbytehq/airbyte/pull/10342) | Use connection pool, and fix connection leak. | +| 0.4.11 | 2022-02-14 | [\#9920](https://github.com/airbytehq/airbyte/pull/9920) | Updated the size of staging files for S3 staging. Also, added closure of S3 writers to staging files when data has been written to an staging file. | +| 0.4.10 | 2022-02-14 | [\#10297](https://github.com/airbytehq/airbyte/pull/10297) | Halve the record buffer size to reduce memory consumption. | +| 0.4.9 | 2022-02-14 | [\#10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `ExitOnOutOfMemoryError` JVM flag. | +| 0.4.8 | 2022-02-01 | [\#9959](https://github.com/airbytehq/airbyte/pull/9959) | Fix null pointer exception from buffered stream consumer. | +| 0.4.7 | 2022-01-29 | [\#9745](https://github.com/airbytehq/airbyte/pull/9745) | Integrate with Sentry. 
| +| 0.4.6 | 2022-01-28 | [#9623](https://github.com/airbytehq/airbyte/pull/9623) | Add jdbc_url_params support for optional JDBC parameters | +| 0.4.5 | 2021-12-29 | [#9184](https://github.com/airbytehq/airbyte/pull/9184) | Update connector fields title/description | +| 0.4.4 | 2022-01-24 | [#9743](https://github.com/airbytehq/airbyte/pull/9743) | Fixed bug with dashes in schema name | +| 0.4.3 | 2022-01-20 | [#9531](https://github.com/airbytehq/airbyte/pull/9531) | Start using new S3StreamCopier and expose the purgeStagingData option | +| 0.4.2 | 2022-01-10 | [#9141](https://github.com/airbytehq/airbyte/pull/9141) | Fixed duplicate rows on retries | +| 0.4.1 | 2021-01-06 | [#9311](https://github.com/airbytehq/airbyte/pull/9311) | Update сreating schema during check | +| 0.4.0 | 2021-12-27 | [#9063](https://github.com/airbytehq/airbyte/pull/9063) | Updated normalization to produce permanent tables | +| 0.3.24 | 2021-12-23 | [#8869](https://github.com/airbytehq/airbyte/pull/8869) | Changed staging approach to Byte-Buffered | +| 0.3.23 | 2021-12-22 | [#9039](https://github.com/airbytehq/airbyte/pull/9039) | Added part_size configuration in UI for S3 loading method | +| 0.3.22 | 2021-12-21 | [#9006](https://github.com/airbytehq/airbyte/pull/9006) | Updated jdbc schema naming to follow Snowflake Naming Conventions | +| 0.3.21 | 2021-12-15 | [#8781](https://github.com/airbytehq/airbyte/pull/8781) | Updated check method to verify permissions to create/drop stage for internal staging; compatibility fix for Java 17 | +| 0.3.20 | 2021-12-10 | [#8562](https://github.com/airbytehq/airbyte/pull/8562) | Moving classes around for better dependency management; compatibility fix for Java 17 | +| 0.3.19 | 2021-12-06 | [#8528](https://github.com/airbytehq/airbyte/pull/8528) | Set Internal Staging as default choice | +| 0.3.18 | 2021-11-26 | [#8253](https://github.com/airbytehq/airbyte/pull/8253) | Snowflake Internal Staging Support | +| 0.3.17 | 2021-11-08 | [#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | +| 0.3.15 | 2021-10-11 | [#6949](https://github.com/airbytehq/airbyte/pull/6949) | Each stream was split into files of 10,000 records each for copying using S3 or GCS | +| 0.3.14 | 2021-09-08 | [#5924](https://github.com/airbytehq/airbyte/pull/5924) | Fixed AWS S3 Staging COPY is writing records from different table in the same raw table | +| 0.3.13 | 2021-09-01 | [#5784](https://github.com/airbytehq/airbyte/pull/5784) | Updated query timeout from 30 minutes to 3 hours | +| 0.3.12 | 2021-07-30 | [#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | +| 0.3.11 | 2021-07-21 | [#3555](https://github.com/airbytehq/airbyte/pull/3555) | Partial Success in BufferedStreamConsumer | +| 0.3.10 | 2021-07-12 | [#4713](https://github.com/airbytehq/airbyte/pull/4713) | Tag traffic with `airbyte` label to enable optimization opportunities from Snowflake | From 259b2f585f08baaaf71c32c9c2c09f4993e63155 Mon Sep 17 00:00:00 2001 From: Oleksandr Sheheda Date: Thu, 12 May 2022 17:07:32 +0300 Subject: [PATCH 42/55] Follow up on #2655 Clean up kafka source connector docs (#12343) * 2655 Clean up kafka source connector docs * 2655 Clean up kafka source connector docs * 2655 Clean up kafka source connector docs --- .../connectors/source-kafka/README.md | 10 +-- docs/integrations/sources/kafka.md | 83 ++++++------------- 2 files changed, 29 insertions(+), 64 deletions(-) 
diff --git a/airbyte-integrations/connectors/source-kafka/README.md b/airbyte-integrations/connectors/source-kafka/README.md index 54126ee0d0f8e..d658d739c97ac 100644 --- a/airbyte-integrations/connectors/source-kafka/README.md +++ b/airbyte-integrations/connectors/source-kafka/README.md @@ -43,16 +43,10 @@ We use `JUnit` for Java tests. Place unit tests under `src/test/io/airbyte/integrations/source/kafka`. #### Acceptance Tests -Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in -`src/test-integration/java/io/airbyte/integrations/source/KafkaSourceAcceptanceTest.java`. +Airbyte has a standard test suite that all source connectors must pass. ### Using gradle to run tests -All commands should be run from airbyte project root. -To run unit tests: -``` -./gradlew :airbyte-integrations:connectors:source-kafka:unitTest -``` -To run acceptance and custom integration tests: +All commands should be run from airbyte project root. To run acceptance and custom integration tests: ``` ./gradlew :airbyte-integrations:connectors:source-kafka:integrationTest ``` diff --git a/docs/integrations/sources/kafka.md b/docs/integrations/sources/kafka.md index 6fbd48a6595d9..6fa09ed5869a3 100644 --- a/docs/integrations/sources/kafka.md +++ b/docs/integrations/sources/kafka.md @@ -1,73 +1,44 @@ # Kafka -## Overview +This page guides you through the process of setting up the Kafka source connector. -The Airbyte Kafka source allows you to sync data from Kafka. Each Kafka topic is written to the corresponding stream. +# Set up guide -### Sync overview +## Step 1: Set up Kafka -#### Output schema +To use the Kafka source connector, you'll need: -Each Kafka topic will be output into a stream. +* [A Kafka cluster 1.0 or above](https://kafka.apache.org/quickstart) +* Airbyte user should be allowed to read messages from topics, and these topics should be created before reading from Kafka. -Currently, this connector only reads data with JSON format. More formats \(e.g. Apache Avro\) will be supported in the future. +## Step 2: Setup the Kafka source in Airbyte -#### Features +You'll need the following information to configure the Kafka source: -| Feature | Supported?\(Yes/No\) | Notes | -| :--- | :--- | :--- | -| Full Refresh Sync | Yes | | -| Incremental - Append Sync | Yes | | -| Namespaces | No | | - -## Getting started - -### Requirements - -To use the Kafka source, you'll need: - -* A Kafka cluster 1.0 or above. - -### Setup guide - -#### Network Access +* **Group ID** - The Group ID is how you distinguish different consumer groups. (e.g. group.id) +* **Protocol** - The Protocol used to communicate with brokers. +* **Client ID** - An ID string to pass to the server when making requests. The purpose of this is to be able to track the source of requests beyond just ip/port by allowing a logical application name to be included in server-side request logging. (e.g. airbyte-consumer) +* **Test Topic** - The Topic to test in case the Airbyte can consume messages. (e.g. test.topic) +* **Subscription Method** - You can choose to manually assign a list of partitions, or subscribe to all topics matching specified pattern to get dynamically assigned partitions. +* **List of topic** +* **Bootstrap Servers** - A list of host/port pairs to use for establishing the initial connection to the Kafka cluster. -Make sure your Kafka brokers can be accessed by Airbyte. +### For Airbyte Cloud: -#### **Permissions** +1. 
[Log into your Airbyte Cloud](https://cloud.airbyte.io/workspaces) account. +2. In the left navigation bar, click **Sources**. In the top-right corner, click **+new source**. +3. On the Set up the source page, enter the name for the Kafka connector and select **Kafka** from the Source type dropdown. +4. Follow the [Setup the Kafka source in Airbyte](kafka.md#Setup-the-Kafka-Source-in-Airbyte) -Airbyte should be allowed to read messages from topics, and these topics should be created before reading from Kafka. +## Supported sync modes -#### Target topics +The Kafka source connector supports the following[sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-sync-modes): -You can determine the topics from which messages are read via the `topic_pattern` configuration parameter. Messages can be read from a hardcoded, pre-defined topic. - -To read all messages from a single hardcoded topic, enter its name in the `topic_pattern` field e.g: setting `topic_pattern` to `my-topic-name` will read all messages from that topic. - -You can determine the topic partitions from which messages are read via the `topic_partitions` configuration parameter. - -### Setup the Kafka destination in Airbyte - -You should now have all the requirements needed to configure Kafka as a destination in the UI. You can configure the following parameters on the Kafka destination \(though many of these are optional or have default values\): - -* **Bootstrap servers** -* **Topic pattern** -* **Topic partition** -* **Test topic** -* **Group ID** -* **Max poll records** -* **SASL JAAS config** -* **SASL mechanism** -* **Client ID** -* **Enable auto commit** -* **Auto commit interval ms** -* **Client DNS lookup** -* **Retry backoff ms** -* **Request timeout ms** -* **Receive buffer bytes** -* **Repeated calls** - -More info about this can be found in the [Kafka consumer configs documentation site](https://kafka.apache.org/documentation/#consumerconfigs). 
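As a rough illustration of how those fields line up with the underlying Kafka client, the sketch below builds a consumer from them. This is an assumption-laden example rather than connector code: the broker addresses, group/client IDs, and topic pattern are invented placeholders, and `PLAINTEXT` stands in for whatever protocol your brokers actually use.

```java
import java.util.Properties;
import java.util.regex.Pattern;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class KafkaSourceSettingsSketch {

  public static KafkaConsumer<String, String> buildConsumer() {
    final Properties props = new Properties();
    // Bootstrap Servers: initial host/port pairs used to discover the cluster.
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "broker1:9092,broker2:9092");
    // Group ID: distinguishes this consumer group from others.
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "airbyte-group");
    // Client ID: lets server-side request logs attribute traffic to this application.
    props.put(ConsumerConfig.CLIENT_ID_CONFIG, "airbyte-consumer");
    // Protocol: how the client talks to the brokers (placeholder value).
    props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "PLAINTEXT");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

    final KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
    // Subscription Method: either subscribe to all topics matching a pattern (dynamic
    // partition assignment), or pass an explicit list such as List.of("test.topic").
    consumer.subscribe(Pattern.compile("test\\..*"));
    return consumer;
  }
}
```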
+| Feature | Supported?\(Yes/No\) | Notes | +| :--- | :--- | :--- | +| Full Refresh Sync | Yes | | +| Incremental - Append Sync | Yes | | +| Namespaces | No | | ## Changelog From 3f59d6c1e39e522340240f19890d457f4ffdb6f2 Mon Sep 17 00:00:00 2001 From: Teal Larson Date: Thu, 12 May 2022 10:17:21 -0400 Subject: [PATCH 43/55] center align group label in connector form (#12811) --- .../ServiceForm/components/Sections/ConditionSection.tsx | 3 +++ 1 file changed, 3 insertions(+) diff --git a/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/ConditionSection.tsx b/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/ConditionSection.tsx index c18507266e2a7..384f1feb6d0af 100644 --- a/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/ConditionSection.tsx +++ b/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/ConditionSection.tsx @@ -16,7 +16,10 @@ import { FormSection } from "./FormSection"; const GroupLabel = styled(Label)` width: auto; margin-right: 8px; + padding-top: 8px; display: inline-block; + padding-bottom: 0px; + vertical-align: middle; `; const ConditionControls = styled.div` From 34ba6aa8fbeb8379b7b95ef0deb63a6c926bae12 Mon Sep 17 00:00:00 2001 From: Marcos Marx Date: Thu, 12 May 2022 11:35:58 -0300 Subject: [PATCH 44/55] correct link to syncmode page (#12793) --- airbyte-webapp/src/config/uiConfig.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-webapp/src/config/uiConfig.ts b/airbyte-webapp/src/config/uiConfig.ts index 6e8d7f52959ff..a0f14d9a0d69a 100644 --- a/airbyte-webapp/src/config/uiConfig.ts +++ b/airbyte-webapp/src/config/uiConfig.ts @@ -17,7 +17,7 @@ const uiConfig = { tutorialLink: "https://www.youtube.com/watch?v=Rcpt5SVsMpk&feature=emb_logo", statusLink: "https://status.airbyte.io/", recipesLink: "https://airbyte.com/recipes", - syncModeLink: `${BASE_DOCS_LINK}/understanding-airbyte/connections/incremental-deduped-history`, + syncModeLink: `${BASE_DOCS_LINK}/understanding-airbyte/connections`, demoLink: "https://demo.airbyte.io", contactSales: "https://airbyte.com/talk-to-sales", webpageLink: "https://airbyte.com", From 453680ff27ff6af2b20c1d23d5cec95025be9612 Mon Sep 17 00:00:00 2001 From: Teal Larson Date: Thu, 12 May 2022 11:13:18 -0400 Subject: [PATCH 45/55] Update incorrect docs links in definition seed files (#12750) * update incorrect docs links in definition seed files * cleanup --- .../main/resources/seed/destination_definitions.yaml | 2 +- .../src/main/resources/seed/source_definitions.yaml | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index 4edc6ce194c51..b1d7afcfaa989 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -15,7 +15,7 @@ destinationDefinitionId: 0eeee7fb-518f-4045-bacc-9619e31c43ea dockerRepository: airbyte/destination-amazon-sqs dockerImageTag: 0.1.0 - documentationUrl: https://docs.airbyte.io/integrations/destinations/amazonsqs + documentationUrl: https://docs.airbyte.io/integrations/destinations/amazon-sqs icon: amazonsqs.svg releaseStage: alpha - name: AWS Datalake diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index a804d8e657c4a..23876bf2f510d 100644 --- 
a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
@@ -249,7 +249,7 @@
   sourceDefinitionId: 010eb12f-837b-4685-892d-0a39f76a98f5
   dockerRepository: airbyte/source-facebook-pages
   dockerImageTag: 0.1.6
-  documentationUrl: https://hub.docker.com/r/airbyte/source-facebook-pages
+  documentationUrl: https://docs.airbyte.com/integrations/sources/facebook-pages
   icon: facebook.svg
   sourceType: api
   releaseStage: alpha
@@ -407,7 +407,7 @@
   sourceDefinitionId: 6acf6b55-4f1e-4fca-944e-1a3caef8aba8
   dockerRepository: airbyte/source-instagram
   dockerImageTag: 0.1.9
-  documentationUrl: https://hub.docker.com/r/airbyte/source-instagram
+  documentationUrl: https://docs.airbyte.com/integrations/sources/instagram
   icon: instagram.svg
   sourceType: api
   releaseStage: beta
@@ -463,7 +463,7 @@
   sourceDefinitionId: 789f8e7a-2d28-11ec-8d3d-0242ac130003
   dockerRepository: airbyte/source-lemlist
   dockerImageTag: 0.1.0
-  documentationUrl: https://docs.airbyte.io/integrations/sources/source-lemlist
+  documentationUrl: https://docs.airbyte.io/integrations/sources/lemlist
   sourceType: api
   releaseStage: alpha
 - name: Lever Hiring
@@ -598,7 +598,7 @@
   sourceDefinitionId: bb6afd81-87d5-47e3-97c4-e2c2901b1cf8
   dockerRepository: airbyte/source-onesignal
   dockerImageTag: 0.1.2
-  documentationUrl: https://docs.airbyte.io/integrations/sources/lever-onesignal
+  documentationUrl: https://docs.airbyte.io/integrations/sources/onesignal
   icon: onesignal.svg
   sourceType: api
   releaseStage: alpha
@@ -955,7 +955,7 @@
   name: YouTube Analytics
   dockerRepository: airbyte/source-youtube-analytics
   dockerImageTag: 0.1.0
-  documentationUrl: https://docs.airbyte.io/integrations/sources/source-youtube-analytics
+  documentationUrl: https://docs.airbyte.io/integrations/sources/youtube-analytics
   icon: youtube.svg
   sourceType: api
   releaseStage: alpha

From b3373b97422b9ec0b4127e3b1f1bf3202fe2af4f Mon Sep 17 00:00:00 2001
From: Benoit Moriceau
Date: Thu, 12 May 2022 09:00:11 -0700
Subject: [PATCH 46/55] Update state message (#12586)

What
Update the Airbyte state message to support per-stream state. The state message still contains the old way of storing the state in the `data` field. It introduces 2 new fields to represent the global and the per-stream state.
---
 .../airbyte_protocol/airbyte_protocol.yaml   | 41 +++++++++++++++++--
 1 file changed, 38 insertions(+), 3 deletions(-)

diff --git a/airbyte-protocol/models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml b/airbyte-protocol/models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml
index 760c5e895ef45..545c1c02b5d9c 100644
--- a/airbyte-protocol/models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml
+++ b/airbyte-protocol/models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml
@@ -71,13 +71,48 @@ definitions:
   AirbyteStateMessage:
     type: object
     additionalProperties: true
-    required:
-      - data
     properties:
+      state_type:
+        "$ref": "#/definitions/AirbyteStateType"
       data:
-        description: "the state data"
+        description: "(Deprecated) the state data"
         type: object
         existingJavaType: com.fasterxml.jackson.databind.JsonNode
+      global:
+        "$ref": "#/definitions/AirbyteStateBlob"
+      streams:
+        type: array
+        items:
+          "$ref": "#/definitions/AirbyteStreamState"
+
+  AirbyteStateType:
+    type: string
+    description: >
+      The type of state the other fields represent.
+      If not set, the state data is interpreted as GLOBAL and should be read from the `data` field for backwards compatibility.
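(For orientation — an invented illustration, not part of the schema: a record using the new per-stream shape could look roughly like the string below, wrapped in the usual `STATE` message envelope. Stream names and cursor values are placeholders.)

```java
public class PerStreamStateExample {

  // A minimal sketch of a PER_STREAM state record as it might appear on the wire,
  // derived from the schema above. The field values here are invented.
  static final String PER_STREAM_STATE_JSON = """
      {
        "type": "STATE",
        "state": {
          "state_type": "PER_STREAM",
          "streams": [
            { "name": "users",  "state": { "cursor": "2022-05-12T00:00:00Z" } },
            { "name": "orders", "state": { "cursor": "12345" } }
          ]
        }
      }
      """;
}
```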
+ GLOBAL means that the state should be read from `global` and means that it represents the state for all the streams. + PER_STREAM means that the state should be read from `streams`. Each item in the list represents the state for the associated stream. + enum: + - GLOBAL + - PER_STREAM + + AirbyteStreamState: + type: object + description: "per stream state data" + additionalProperties: false + properties: + name: + description: "Stream name" + type: string + state: + "$ref": "#/definitions/AirbyteStateBlob" + + AirbyteStateBlob: + type: object + description: "the state data" + additionalProperties: false + existingJavaType: com.fasterxml.jackson.databind.JsonNode + AirbyteLogMessage: type: object additionalProperties: true From a3658ba13ad1f6ec0ea24e726a317d167910a2bc Mon Sep 17 00:00:00 2001 From: Andy Date: Thu, 12 May 2022 09:16:00 -0700 Subject: [PATCH 47/55] Update spec to replace markdown (#12797) * Update spec to replace markdown * Update version * S3 Destination: Update change log * auto-bump connector version Co-authored-by: oneshcheret Co-authored-by: Octavia Squidington III --- .../seed/destination_definitions.yaml | 2 +- .../resources/seed/destination_specs.yaml | 39 +++++++++++-------- .../connectors/destination-s3/Dockerfile | 2 +- .../src/main/resources/spec.json | 22 +++++------ docs/integrations/destinations/s3.md | 1 + 5 files changed, 36 insertions(+), 30 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index b1d7afcfaa989..8e7e095df0653 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -237,7 +237,7 @@ - name: S3 destinationDefinitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362 dockerRepository: airbyte/destination-s3 - dockerImageTag: 0.3.4 + dockerImageTag: 0.3.5 documentationUrl: https://docs.airbyte.io/integrations/destinations/s3 icon: s3.svg resourceRequirements: diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index 1609c343b9057..334831a8a467a 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -3651,7 +3651,7 @@ supported_destination_sync_modes: - "append" - "overwrite" -- dockerImage: "airbyte/destination-s3:0.3.4" +- dockerImage: "airbyte/destination-s3:0.3.5" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/s3" connectionSpecification: @@ -3668,8 +3668,9 @@ access_key_id: type: "string" description: "The access key ID to access the S3 bucket. Airbyte requires\ - \ Read and Write permissions to the given bucket. See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys)\ - \ on how to generate an access key." + \ Read and Write permissions to the given bucket. Read more here." title: "S3 Key ID *" airbyte_secret: true examples: @@ -3677,34 +3678,37 @@ order: 0 secret_access_key: type: "string" - description: "The corresponding secret to the access key ID. See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys)" + description: "The corresponding secret to the access key ID. 
Read more here" title: "S3 Access Key *" airbyte_secret: true examples: - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" order: 1 s3_bucket_name: - title: "S3 Bucket Name *" + title: "S3 Bucket Name" type: "string" - description: "The name of the S3 bucket. See [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html)\ - \ to create an S3 bucket." + description: "The name of the S3 bucket. Read more here." examples: - "airbyte_sync" order: 2 s3_bucket_path: - title: "S3 Bucket Path *" + title: "S3 Bucket Path" description: "Directory under the S3 bucket where data will be written.\ - \ See [this](https://docs.airbyte.com/integrations/destinations/s3#:~:text=to%20format%20the-,bucket%20path,-%3A)" + \ Read more here" type: "string" examples: - "data_sync/test" order: 3 s3_bucket_region: - title: "S3 Bucket Region *" + title: "S3 Bucket Region" type: "string" default: "" - description: "The region of the S3 bucket. See [this](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-regions-availability-zones.html#concepts-available-regions)\ - \ for all region codes." + description: "The region of the S3 bucket. See here for all region codes." enum: - "" - "us-east-1" @@ -3736,8 +3740,8 @@ format: title: "Output Format *" type: "object" - description: "Format of the data output. See [this](https://docs.airbyte.com/integrations/destinations/s3/#output-schema)\ - \ for more details" + description: "Format of the data output. See here for more details" oneOf: - title: "Avro: Apache Avro" required: @@ -4024,15 +4028,16 @@ title: "Endpoint (Optional)" type: "string" default: "" - description: "This is your S3 endpoint url. (If you are working with AWS\ - \ S3, you can leave blank). See [this](https://docs.aws.amazon.com/general/latest/gr/s3.html#:~:text=Service%20endpoints-,Amazon%20S3%20endpoints,-When%20you%20use)" + description: "Your S3 endpoint url. Read more here" examples: - "http://localhost:9000" order: 6 s3_path_format: title: "S3 Path Format (Optional)" description: "Format string on how data will be organized inside the S3\ - \ bucket directory. See [this](https://docs.airbyte.com/integrations/destinations/s3#:~:text=The%20full%20path%20of%20the%20output%20data%20with%20the%20default%20S3%20path%20format)" + \ bucket directory. Read more here" type: "string" examples: - "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" diff --git a/airbyte-integrations/connectors/destination-s3/Dockerfile b/airbyte-integrations/connectors/destination-s3/Dockerfile index c516933d05d18..f704941ed0568 100644 --- a/airbyte-integrations/connectors/destination-s3/Dockerfile +++ b/airbyte-integrations/connectors/destination-s3/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-s3 COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.3.4 +LABEL io.airbyte.version=0.3.5 LABEL io.airbyte.name=airbyte/destination-s3 diff --git a/airbyte-integrations/connectors/destination-s3/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-s3/src/main/resources/spec.json index 9f66df48798c0..47609d931db52 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-s3/src/main/resources/spec.json @@ -18,7 +18,7 @@ "properties": { "access_key_id": { "type": "string", - "description": "The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. 
See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) on how to generate an access key.", + "description": "The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.", "title": "S3 Key ID *", "airbyte_secret": true, "examples": ["A012345678910EXAMPLE"], @@ -26,31 +26,31 @@ }, "secret_access_key": { "type": "string", - "description": "The corresponding secret to the access key ID. See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys)", + "description": "The corresponding secret to the access key ID. Read more here", "title": "S3 Access Key *", "airbyte_secret": true, "examples": ["a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"], "order": 1 }, "s3_bucket_name": { - "title": "S3 Bucket Name *", + "title": "S3 Bucket Name", "type": "string", - "description": "The name of the S3 bucket. See [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) to create an S3 bucket.", + "description": "The name of the S3 bucket. Read more here.", "examples": ["airbyte_sync"], "order": 2 }, "s3_bucket_path": { - "title": "S3 Bucket Path *", - "description": "Directory under the S3 bucket where data will be written. See [this](https://docs.airbyte.com/integrations/destinations/s3#:~:text=to%20format%20the-,bucket%20path,-%3A)", + "title": "S3 Bucket Path", + "description": "Directory under the S3 bucket where data will be written. Read more here", "type": "string", "examples": ["data_sync/test"], "order": 3 }, "s3_bucket_region": { - "title": "S3 Bucket Region *", + "title": "S3 Bucket Region", "type": "string", "default": "", - "description": "The region of the S3 bucket. See [this](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-regions-availability-zones.html#concepts-available-regions) for all region codes.", + "description": "The region of the S3 bucket. See here for all region codes.", "enum": [ "", "us-east-1", @@ -84,7 +84,7 @@ "format": { "title": "Output Format *", "type": "object", - "description": "Format of the data output. See [this](https://docs.airbyte.com/integrations/destinations/s3/#output-schema) for more details", + "description": "Format of the data output. See here for more details", "oneOf": [ { "title": "Avro: Apache Avro", @@ -382,13 +382,13 @@ "title": "Endpoint (Optional)", "type": "string", "default": "", - "description": "This is your S3 endpoint url. (If you are working with AWS S3, you can leave blank). See [this](https://docs.aws.amazon.com/general/latest/gr/s3.html#:~:text=Service%20endpoints-,Amazon%20S3%20endpoints,-When%20you%20use)", + "description": "Your S3 endpoint url. Read more here", "examples": ["http://localhost:9000"], "order": 6 }, "s3_path_format": { "title": "S3 Path Format (Optional)", - "description": "Format string on how data will be organized inside the S3 bucket directory. See [this](https://docs.airbyte.com/integrations/destinations/s3#:~:text=The%20full%20path%20of%20the%20output%20data%20with%20the%20default%20S3%20path%20format)", + "description": "Format string on how data will be organized inside the S3 bucket directory. 
Read more here", "type": "string", "examples": [ "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" diff --git a/docs/integrations/destinations/s3.md b/docs/integrations/destinations/s3.md index 1fa74ab3c547e..fddb0672c1330 100644 --- a/docs/integrations/destinations/s3.md +++ b/docs/integrations/destinations/s3.md @@ -315,6 +315,7 @@ In order for everything to work correctly, it is also necessary that the user wh | Version | Date | Pull Request | Subject | |:--------| :--- | :--- |:---------------------------------------------------------------------------------------------------------------------------| +| 0.3.5 | 2022-05-12 | [\#12797](https://github.com/airbytehq/airbyte/pull/12797) | Update spec to replace markdown. | | 0.3.4 | 2022-05-04 | [\#12578](https://github.com/airbytehq/airbyte/pull/12578) | In JSON to Avro conversion, log JSON field values that do not follow Avro schema for debugging. | | 0.3.3 | 2022-04-20 | [\#12167](https://github.com/airbytehq/airbyte/pull/12167) | Add gzip compression option for CSV and JSONL formats. | | 0.3.2 | 2022-04-22 | [\#11795](https://github.com/airbytehq/airbyte/pull/11795) | Fix the connection check to verify the provided bucket path. | From 9c5fb5e75be6f10114125d66370a7f341a21be5d Mon Sep 17 00:00:00 2001 From: Serhii Chvaliuk Date: Thu, 12 May 2022 21:37:16 +0300 Subject: [PATCH 48/55] fix getExpectedConsentUrl (#12816) --- .../test/java/io/airbyte/oauth/flows/HubspotOAuthFlowTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/HubspotOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/HubspotOAuthFlowTest.java index 40c4ec8cfc9b3..6fbdc9b0858f2 100644 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/HubspotOAuthFlowTest.java +++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/HubspotOAuthFlowTest.java @@ -15,7 +15,7 @@ protected BaseOAuthFlow getOAuthFlow() { @Override protected String getExpectedConsentUrl() { - return "https://app.hubspot.com/oauth/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scopes=content+crm.schemas.deals.read+crm.objects.owners.read+forms+tickets+e-commerce+crm.objects.companies.read+crm.lists.read+crm.objects.deals.read+crm.schemas.contacts.read+crm.objects.contacts.read+crm.schemas.companies.read+files+forms-uploaded-files+files.ui_hidden.read"; + return "https://app.hubspot.com/oauth/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scopes=content+crm.schemas.deals.read+crm.objects.owners.read+forms+tickets+e-commerce+crm.objects.companies.read+crm.lists.read+crm.objects.deals.read+crm.schemas.contacts.read+crm.objects.contacts.read+crm.schemas.companies.read+files+forms-uploaded-files+files.ui_hidden.read+crm.objects.feedback_submissions.read+sales-email-read+automation"; } } From 4e6380d2564f76dc86c909d53013db87cc2a3691 Mon Sep 17 00:00:00 2001 From: Edmundo Ruiz Ghanem <168664+edmundito@users.noreply.github.com> Date: Thu, 12 May 2022 14:38:32 -0400 Subject: [PATCH 49/55] Remove redirect to previous page after manual log out, expose redirect param, fix auth page titles (#12678) * Add loggedOut state to auth service to track when user manually logged out Only redirect to previous page if user logged out Update from state to be part of the query instead of internal to the router * Add page titles to Sign up, log in, and reset password pages * Remove react query queries on logout Fix Main view routes to redirect away from 
auth routes using the query instead of from state Remove stateUtils as it's no longer used * Remove unnecessary var in LoginPage then clause Co-authored-by: Krishna Glick Co-authored-by: Krishna Glick --- .../src/packages/cloud/cloudRoutes.tsx | 9 ++----- .../cloud/services/auth/AuthService.tsx | 8 +++--- .../packages/cloud/services/auth/reducer.ts | 4 +++ .../src/packages/cloud/views/auth/Auth.tsx | 7 +++++- .../cloud/views/auth/LoginPage/LoginPage.tsx | 25 +++++++++---------- .../ResetPasswordPage/ResetPasswordPage.tsx | 2 ++ .../views/auth/SignupPage/SignupPage.tsx | 2 ++ airbyte-webapp/src/utils/stateUtils.ts | 5 ---- 8 files changed, 33 insertions(+), 29 deletions(-) delete mode 100644 airbyte-webapp/src/utils/stateUtils.ts diff --git a/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx b/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx index ec3c6ba1a488f..3cfcbb7cd99bf 100644 --- a/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx +++ b/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx @@ -21,7 +21,6 @@ import DestinationPage from "pages/DestinationPage"; import OnboardingPage from "pages/OnboardingPage"; import SourcesPage from "pages/SourcesPage"; import { useCurrentWorkspace, WorkspaceServiceProvider } from "services/workspaces/WorkspacesService"; -import { hasFromState } from "utils/stateUtils"; import { storeUtmFromQuery } from "utils/utmStorage"; import { CompleteOauthRequest } from "views/CompleteOauthRequest"; @@ -106,16 +105,12 @@ const MainRoutes: React.FC = () => { const MainViewRoutes = () => { useApiHealthPoll(); useIntercom(); - const { location } = useRouter(); + const { query } = useRouter(); return ( {[CloudRoutes.Login, CloudRoutes.Signup, CloudRoutes.FirebaseAction].map((r) => ( - : } - /> + : } /> ))} } /> } /> diff --git a/airbyte-webapp/src/packages/cloud/services/auth/AuthService.tsx b/airbyte-webapp/src/packages/cloud/services/auth/AuthService.tsx index 68f401b8e4e63..bc9e6315b2185 100644 --- a/airbyte-webapp/src/packages/cloud/services/auth/AuthService.tsx +++ b/airbyte-webapp/src/packages/cloud/services/auth/AuthService.tsx @@ -36,11 +36,12 @@ export type AuthSendEmailVerification = () => Promise; export type AuthVerifyEmail = (code: string) => Promise; export type AuthLogout = () => void; -type AuthContextApi = { +interface AuthContextApi { user: User | null; inited: boolean; emailVerified: boolean; isLoading: boolean; + loggedOut: boolean; login: AuthLogin; signUp: AuthSignUp; updatePassword: AuthUpdatePassword; @@ -50,7 +51,7 @@ type AuthContextApi = { sendEmailVerification: AuthSendEmailVerification; verifyEmail: AuthVerifyEmail; logout: AuthLogout; -}; +} export const AuthContext = React.createContext(null); @@ -96,6 +97,7 @@ export const AuthenticationProvider: React.FC = ({ children }) => { inited: state.inited, isLoading: state.loading, emailVerified: state.emailVerified, + loggedOut: state.loggedOut, async login(values: { email: string; password: string }): Promise { await authService.login(values.email, values.password); @@ -105,8 +107,8 @@ export const AuthenticationProvider: React.FC = ({ children }) => { }, async logout(): Promise { await authService.signOut(); + queryClient.removeQueries(); loggedOut(); - await queryClient.invalidateQueries(); }, async updateEmail(email, password): Promise { await userService.changeEmail(email); diff --git a/airbyte-webapp/src/packages/cloud/services/auth/reducer.ts b/airbyte-webapp/src/packages/cloud/services/auth/reducer.ts index 5cb80251d3b7c..74eac730c8054 100644 --- 
a/airbyte-webapp/src/packages/cloud/services/auth/reducer.ts +++ b/airbyte-webapp/src/packages/cloud/services/auth/reducer.ts @@ -16,6 +16,7 @@ export type AuthServiceState = { currentUser: User | null; emailVerified: boolean; loading: boolean; + loggedOut: boolean; }; export const initialState: AuthServiceState = { @@ -23,6 +24,7 @@ export const initialState: AuthServiceState = { currentUser: null, emailVerified: false, loading: false, + loggedOut: false, }; export const authStateReducer = createReducer(initialState) @@ -39,6 +41,7 @@ export const authStateReducer = createReducer(initial emailVerified: action.payload.emailVerified, inited: true, loading: false, + loggedOut: false, }; }) .handleAction(actions.emailVerified, (state, action): AuthServiceState => { @@ -52,5 +55,6 @@ export const authStateReducer = createReducer(initial ...state, currentUser: null, emailVerified: false, + loggedOut: true, }; }); diff --git a/airbyte-webapp/src/packages/cloud/views/auth/Auth.tsx b/airbyte-webapp/src/packages/cloud/views/auth/Auth.tsx index fbf4dcb331ee3..0fb8bdcef5927 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/Auth.tsx +++ b/airbyte-webapp/src/packages/cloud/views/auth/Auth.tsx @@ -6,6 +6,7 @@ import { LoadingPage } from "components"; import useRouter from "hooks/useRouter"; import { CloudRoutes } from "packages/cloud/cloudRoutes"; +import { useAuthService } from "packages/cloud/services/auth/AuthService"; import { ResetPasswordAction } from "packages/cloud/views/FirebaseActionRoute"; import FormContent from "./components/FormContent"; @@ -39,6 +40,7 @@ const NewsPart = styled(Part)` const Auth: React.FC = () => { const { pathname, location } = useRouter(); + const { loggedOut } = useAuthService(); return ( @@ -50,7 +52,10 @@ const Auth: React.FC = () => { } /> } /> } /> - } /> + } + /> diff --git a/airbyte-webapp/src/packages/cloud/views/auth/LoginPage/LoginPage.tsx b/airbyte-webapp/src/packages/cloud/views/auth/LoginPage/LoginPage.tsx index 732f445228797..ab0bc67e88106 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/LoginPage/LoginPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/auth/LoginPage/LoginPage.tsx @@ -4,6 +4,7 @@ import { FormattedMessage, useIntl } from "react-intl"; import * as yup from "yup"; import { LabeledInput, Link, LoadingButton } from "components"; +import HeadTitle from "components/HeadTitle"; import useRouter from "hooks/useRouter"; import { CloudRoutes } from "packages/cloud/cloudRoutes"; @@ -20,10 +21,11 @@ const LoginPageValidationSchema = yup.object().shape({ const LoginPage: React.FC = () => { const formatMessage = useIntl().formatMessage; const { login } = useAuthService(); - const { location, replace } = useRouter(); + const { query, replace } = useRouter(); return (
    + @@ -35,18 +37,15 @@ const LoginPage: React.FC = () => { }} validationSchema={LoginPageValidationSchema} onSubmit={async (values, { setFieldError }) => { - return ( - login(values) - // @ts-expect-error state is now unkown, needs proper typing - .then((_) => replace(location.state?.from ?? "/")) - .catch((err) => { - if (err instanceof FieldError) { - setFieldError(err.field, err.message); - } else { - setFieldError("password", err.message); - } - }) - ); + return login(values) + .then(() => replace(query.from ?? "/")) + .catch((err) => { + if (err instanceof FieldError) { + setFieldError(err.field, err.message); + } else { + setFieldError("password", err.message); + } + }); }} validateOnBlur validateOnChange={false} diff --git a/airbyte-webapp/src/packages/cloud/views/auth/ResetPasswordPage/ResetPasswordPage.tsx b/airbyte-webapp/src/packages/cloud/views/auth/ResetPasswordPage/ResetPasswordPage.tsx index 382364266bd10..5b8308fc344c7 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/ResetPasswordPage/ResetPasswordPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/auth/ResetPasswordPage/ResetPasswordPage.tsx @@ -4,6 +4,7 @@ import { FormattedMessage, useIntl } from "react-intl"; import * as yup from "yup"; import { LoadingButton, LabeledInput, Link } from "components"; +import HeadTitle from "components/HeadTitle"; import { useNotificationService } from "hooks/services/Notification/NotificationService"; import { useAuthService } from "packages/cloud/services/auth/AuthService"; @@ -23,6 +24,7 @@ const ResetPasswordPage: React.FC = () => { return (
    + diff --git a/airbyte-webapp/src/packages/cloud/views/auth/SignupPage/SignupPage.tsx b/airbyte-webapp/src/packages/cloud/views/auth/SignupPage/SignupPage.tsx index 8b43403669d4b..b6823f3b1189c 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/SignupPage/SignupPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/auth/SignupPage/SignupPage.tsx @@ -5,6 +5,7 @@ import styled from "styled-components"; import * as yup from "yup"; import { H1, LabeledInput, Link, LoadingButton } from "components"; +import HeadTitle from "components/HeadTitle"; import { useConfig } from "config"; import { FieldError } from "packages/cloud/lib/errors/FieldError"; @@ -43,6 +44,7 @@ const SignupPage: React.FC = () => { return (
    +

    diff --git a/airbyte-webapp/src/utils/stateUtils.ts b/airbyte-webapp/src/utils/stateUtils.ts deleted file mode 100644 index 108478915fc1b..0000000000000 --- a/airbyte-webapp/src/utils/stateUtils.ts +++ /dev/null @@ -1,5 +0,0 @@ -import type { Location } from "react-router-dom"; - -export function hasFromState(state: unknown): state is { from: Location } { - return typeof state === "object" && state !== null && "from" in state; -} From e0f821f6f32b0aac78b7ef54dc71eda6086c6ba0 Mon Sep 17 00:00:00 2001 From: Marcos Marx Date: Thu, 12 May 2022 18:50:18 -0300 Subject: [PATCH 50/55] Docs: remove deprecated variable for mysql binlog config (#12824) --- docs/integrations/sources/mysql.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/integrations/sources/mysql.md b/docs/integrations/sources/mysql.md index 23119e604695b..ba906bd50b1ad 100644 --- a/docs/integrations/sources/mysql.md +++ b/docs/integrations/sources/mysql.md @@ -86,18 +86,18 @@ Your database user should now be ready for use with Airbyte. You must enable binary logging for MySQL replication. The binary logs record transaction updates for replication tools to propagate changes. You can configure your MySQL server configuration file with the following properties, which are described in below: ```text -server-id = 223344 -log_bin = mysql-bin -binlog_format = ROW -binlog_row_image = FULL -expire_logs_days = 10 +server-id = 223344 +log_bin = mysql-bin +binlog_format = ROW +binlog_row_image = FULL +binlog_expire_log_seconds = 864000 ``` * server-id : The value for the server-id must be unique for each server and replication client in the MySQL cluster. The `server-id` should be a non-zero value. If the `server-id` is already set to a non-zero value, you don't need to make any change. You can set the `server-id` to any value between 1 and 4294967295. For more information refer [mysql doc](https://dev.mysql.com/doc/refman/8.0/en/replication-options.html#sysvar_server_id) * log\_bin : The value of log\_bin is the base name of the sequence of binlog files. If the `log_bin` is already set, you don't need to make any change. For more information refer [mysql doc](https://dev.mysql.com/doc/refman/8.0/en/replication-options-binary-log.html#option_mysqld_log-bin) * binlog\_format : The `binlog_format` must be set to `ROW`. For more information refer [mysql doc](https://dev.mysql.com/doc/refman/8.0/en/replication-options-binary-log.html#sysvar_binlog_format) * binlog\_row\_image : The `binlog_row_image` must be set to `FULL`. It determines how row images are written to the binary log. For more information refer [mysql doc](https://dev.mysql.com/doc/refman/5.7/en/replication-options-binary-log.html#sysvar_binlog_row_image) -* expire\_logs\_days : This is the number of days for automatic binlog file removal. We recommend 10 days so that in case of a failure in sync or if the sync is paused, we still have some bandwidth to start from the last point in incremental sync. We also recommend setting frequent syncs for CDC. +* binlog_expire_log_seconds : This is the number of seconds for automatic binlog file removal. We recommend 864000 seconds (10 days) so that in case of a failure in sync or if the sync is paused, we still have some bandwidth to start from the last point in incremental sync. We also recommend setting frequent syncs for CDC. **2. 
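To confirm these settings actually took effect before wiring up CDC, one option is a quick programmatic check. The snippet below is a hypothetical sketch, not part of the connector: the JDBC URL and credentials are placeholders, it assumes MySQL Connector/J is on the classpath, and note that MySQL 8.0 spells the retention variable `binlog_expire_logs_seconds`.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class BinlogSettingsCheck {

  public static void main(final String[] args) throws Exception {
    // Placeholders: point this at your own host and use the replication user created above.
    try (Connection conn = DriverManager.getConnection(
            "jdbc:mysql://localhost:3306", "airbyte", "password");
        Statement stmt = conn.createStatement();
        ResultSet rs = stmt.executeQuery(
            "SHOW VARIABLES WHERE Variable_name IN ('server_id', 'log_bin', "
                + "'binlog_format', 'binlog_row_image', 'binlog_expire_logs_seconds')")) {
      while (rs.next()) {
        // Expect log_bin = ON, binlog_format = ROW, binlog_row_image = FULL, etc.
        System.out.printf("%s = %s%n", rs.getString("Variable_name"), rs.getString("Value"));
      }
    }
  }
}
```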
Enable GTIDs \(Optional\)** From e8084c0189162dafa5c5868dccab6c968d4aec03 Mon Sep 17 00:00:00 2001 From: Lake Mossman Date: Thu, 12 May 2022 17:43:19 -0700 Subject: [PATCH 51/55] Repair temporal state when performing manual actions (#12289) * Repair temporal state when performing manual actions * refactor temporal client and fix tests * add unreachable workflow exception * format * test repeated deletion * add acceptance tests for automatic workflow repair * rename and DRY up manual operation methods in SchedulerHandler * refactor temporal client to batch signal and start requests together in repair case * add comment * remove main method * fix job id fetching * only overwrite workflowState if reset flags are true on input * fix test * fix cancel endpoint * Clean job state before creating new jobs in connection manager workflow (#12589) * first working iteration of cleaning job state on first workflow run * second iteration, with tests * undo local testing changes * move method * add comment explaining placement of clean job state logic * change connection_workflow failure origin value to platform * remove cast from new query * create static var for non terminal job statuses * change failure origin value to airbyte_platform * tweak external message wording * remove unused variable * reword external message * fix merge conflict * remove log lines * move cleaning job state to beginning of workflow * do not clean job state if there is already a job id for this workflow, and add test * see if sleeping fixes test on CI * add repeated test annotation to protect from flakiness * fail jobs before creating new ones to protect from quarantined state * update external message for cleaning job state error --- airbyte-api/src/main/openapi/config.yaml | 1 + .../main/resources/types/FailureReason.yaml | 1 + .../airbyte/scheduler/client/EventRunner.java | 10 +- .../scheduler/client/TemporalEventRunner.java | 12 +- .../airbyte/scheduler/models/JobStatus.java | 1 + .../persistence/DefaultJobPersistence.java | 12 + .../scheduler/persistence/JobPersistence.java | 2 + .../DefaultJobPersistenceTest.java | 39 ++ .../server/handlers/SchedulerHandler.java | 47 ++- .../server/handlers/SchedulerHandlerTest.java | 6 +- .../test/acceptance/AcceptanceTests.java | 99 +++++ .../airbyte/workers/helper/FailureHelper.java | 12 + .../temporal/ConnectionManagerUtils.java | 194 ++++++++++ .../workers/temporal/TemporalClient.java | 231 +++++------- .../exception/DeletedWorkflowException.java | 13 + .../UnreachableWorkflowException.java | 13 + .../ConnectionManagerWorkflowImpl.java | 35 +- .../JobCreationAndStatusUpdateActivity.java | 12 + ...obCreationAndStatusUpdateActivityImpl.java | 44 +++ .../workers/temporal/TemporalClientTest.java | 356 ++++++++++++++++-- .../ConnectionManagerWorkflowTest.java | 22 ++ ...obCreationAndStatusUpdateActivityTest.java | 44 ++- 22 files changed, 994 insertions(+), 212 deletions(-) create mode 100644 airbyte-workers/src/main/java/io/airbyte/workers/temporal/ConnectionManagerUtils.java create mode 100644 airbyte-workers/src/main/java/io/airbyte/workers/temporal/exception/DeletedWorkflowException.java create mode 100644 airbyte-workers/src/main/java/io/airbyte/workers/temporal/exception/UnreachableWorkflowException.java diff --git a/airbyte-api/src/main/openapi/config.yaml b/airbyte-api/src/main/openapi/config.yaml index dd4f72314461d..26adc3dfd97c0 100644 --- a/airbyte-api/src/main/openapi/config.yaml +++ b/airbyte-api/src/main/openapi/config.yaml @@ -3862,6 +3862,7 @@ components: - persistence - 
normalization - dbt + - airbyte_platform AttemptFailureType: description: Categorizes well known errors into types for programmatic handling. If not set, the type of error is not well known. type: string diff --git a/airbyte-config/models/src/main/resources/types/FailureReason.yaml b/airbyte-config/models/src/main/resources/types/FailureReason.yaml index bae623d6da643..72dced892a78d 100644 --- a/airbyte-config/models/src/main/resources/types/FailureReason.yaml +++ b/airbyte-config/models/src/main/resources/types/FailureReason.yaml @@ -17,6 +17,7 @@ properties: - persistence - normalization - dbt + - airbyte_platform failureType: description: Categorizes well known errors into types for programmatic handling. If not set, the type of error is not well known. type: string diff --git a/airbyte-scheduler/client/src/main/java/io/airbyte/scheduler/client/EventRunner.java b/airbyte-scheduler/client/src/main/java/io/airbyte/scheduler/client/EventRunner.java index f1784a8baff40..95bec9f3f8a9f 100644 --- a/airbyte-scheduler/client/src/main/java/io/airbyte/scheduler/client/EventRunner.java +++ b/airbyte-scheduler/client/src/main/java/io/airbyte/scheduler/client/EventRunner.java @@ -4,7 +4,7 @@ package io.airbyte.scheduler.client; -import io.airbyte.workers.temporal.TemporalClient.ManualSyncSubmissionResult; +import io.airbyte.workers.temporal.TemporalClient.ManualOperationResult; import java.util.Set; import java.util.UUID; @@ -12,13 +12,13 @@ public interface EventRunner { void createNewSchedulerWorkflow(final UUID connectionId); - ManualSyncSubmissionResult startNewManualSync(final UUID connectionId); + ManualOperationResult startNewManualSync(final UUID connectionId); - ManualSyncSubmissionResult startNewCancelation(final UUID connectionId); + ManualOperationResult startNewCancellation(final UUID connectionId); - ManualSyncSubmissionResult resetConnection(final UUID connectionId); + ManualOperationResult resetConnection(final UUID connectionId); - ManualSyncSubmissionResult synchronousResetConnection(final UUID connectionId); + ManualOperationResult synchronousResetConnection(final UUID connectionId); void deleteConnection(final UUID connectionId); diff --git a/airbyte-scheduler/client/src/main/java/io/airbyte/scheduler/client/TemporalEventRunner.java b/airbyte-scheduler/client/src/main/java/io/airbyte/scheduler/client/TemporalEventRunner.java index 9eb7df68198f7..d9d9b075b008b 100644 --- a/airbyte-scheduler/client/src/main/java/io/airbyte/scheduler/client/TemporalEventRunner.java +++ b/airbyte-scheduler/client/src/main/java/io/airbyte/scheduler/client/TemporalEventRunner.java @@ -5,7 +5,7 @@ package io.airbyte.scheduler.client; import io.airbyte.workers.temporal.TemporalClient; -import io.airbyte.workers.temporal.TemporalClient.ManualSyncSubmissionResult; +import io.airbyte.workers.temporal.TemporalClient.ManualOperationResult; import java.util.Set; import java.util.UUID; import lombok.AllArgsConstructor; @@ -19,19 +19,19 @@ public void createNewSchedulerWorkflow(final UUID connectionId) { temporalClient.submitConnectionUpdaterAsync(connectionId); } - public ManualSyncSubmissionResult startNewManualSync(final UUID connectionId) { + public ManualOperationResult startNewManualSync(final UUID connectionId) { return temporalClient.startNewManualSync(connectionId); } - public ManualSyncSubmissionResult startNewCancelation(final UUID connectionId) { - return temporalClient.startNewCancelation(connectionId); + public ManualOperationResult startNewCancellation(final UUID connectionId) { + return 
temporalClient.startNewCancellation(connectionId); } - public ManualSyncSubmissionResult resetConnection(final UUID connectionId) { + public ManualOperationResult resetConnection(final UUID connectionId) { return temporalClient.resetConnection(connectionId); } - public ManualSyncSubmissionResult synchronousResetConnection(final UUID connectionId) { + public ManualOperationResult synchronousResetConnection(final UUID connectionId) { return temporalClient.synchronousResetConnection(connectionId); } diff --git a/airbyte-scheduler/models/src/main/java/io/airbyte/scheduler/models/JobStatus.java b/airbyte-scheduler/models/src/main/java/io/airbyte/scheduler/models/JobStatus.java index dca3be7b7b929..7fbe68d97c26d 100644 --- a/airbyte-scheduler/models/src/main/java/io/airbyte/scheduler/models/JobStatus.java +++ b/airbyte-scheduler/models/src/main/java/io/airbyte/scheduler/models/JobStatus.java @@ -17,5 +17,6 @@ public enum JobStatus { CANCELLED; public static final Set TERMINAL_STATUSES = Sets.newHashSet(FAILED, SUCCEEDED, CANCELLED); + public static final Set NON_TERMINAL_STATUSES = Sets.difference(Set.of(values()), TERMINAL_STATUSES); } diff --git a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java index 28b5d553c3543..7a45dd8e2630e 100644 --- a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java +++ b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java @@ -386,6 +386,18 @@ public List listJobsWithStatus(final ConfigType configType, final JobStatus return listJobsWithStatus(Sets.newHashSet(configType), status); } + @Override + public List listJobsForConnectionWithStatuses(final UUID connectionId, final Set configTypes, final Set statuses) + throws IOException { + return jobDatabase.query(ctx -> getJobsFromResult(ctx + .fetch(BASE_JOB_SELECT_AND_JOIN + "WHERE " + + "scope = ? 
AND " + + "config_type IN " + Sqls.toSqlInFragment(configTypes) + " AND " + + "jobs.status IN " + Sqls.toSqlInFragment(statuses) + " " + + ORDER_BY_JOB_TIME_ATTEMPT_TIME, + connectionId.toString()))); + } + @Override public List listJobStatusAndTimestampWithConnection(final UUID connectionId, final Set configTypes, diff --git a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java index 39124cab645e3..b73310ea7cd73 100644 --- a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java +++ b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java @@ -161,6 +161,8 @@ public interface JobPersistence { List listJobsWithStatus(JobConfig.ConfigType configType, JobStatus status) throws IOException; + List listJobsForConnectionWithStatuses(UUID connectionId, Set configTypes, Set statuses) throws IOException; + /** * @param connectionId The ID of the connection * @param configTypes The types of jobs diff --git a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java index 1d283d767f435..e4d44447d99a4 100644 --- a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java +++ b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java @@ -1168,6 +1168,45 @@ public void testListJobsWithStatusAndConfigType() throws IOException, Interrupte assertEquals(expectedIncompleteJob, actualIncompleteJob); } + @Test + @DisplayName("Should only list jobs for the requested connection and with the requested statuses and config types") + public void testListJobsWithStatusesAndConfigTypesForConnection() throws IOException, InterruptedException { + final UUID desiredConnectionId = UUID.randomUUID(); + final UUID otherConnectionId = UUID.randomUUID(); + + // desired connection, statuses, and config types + final long desiredJobId1 = jobPersistence.enqueueJob(desiredConnectionId.toString(), SYNC_JOB_CONFIG).orElseThrow(); + jobPersistence.succeedAttempt(desiredJobId1, jobPersistence.createAttempt(desiredJobId1, LOG_PATH)); + final long desiredJobId2 = jobPersistence.enqueueJob(desiredConnectionId.toString(), SYNC_JOB_CONFIG).orElseThrow(); + final long desiredJobId3 = jobPersistence.enqueueJob(desiredConnectionId.toString(), CHECK_JOB_CONFIG).orElseThrow(); + jobPersistence.succeedAttempt(desiredJobId3, jobPersistence.createAttempt(desiredJobId3, LOG_PATH)); + final long desiredJobId4 = jobPersistence.enqueueJob(desiredConnectionId.toString(), CHECK_JOB_CONFIG).orElseThrow(); + + // right connection id and status, wrong config type + final long otherJobId1 = jobPersistence.enqueueJob(desiredConnectionId.toString(), SPEC_JOB_CONFIG).orElseThrow(); + // right config type and status, wrong connection id + final long otherJobId2 = jobPersistence.enqueueJob(otherConnectionId.toString(), SYNC_JOB_CONFIG).orElseThrow(); + // right connection id and config type, wrong status + final long otherJobId3 = jobPersistence.enqueueJob(desiredConnectionId.toString(), CHECK_JOB_CONFIG).orElseThrow(); + jobPersistence.failAttempt(otherJobId3, jobPersistence.createAttempt(otherJobId3, LOG_PATH)); + + final List actualJobs = 
jobPersistence.listJobsForConnectionWithStatuses(desiredConnectionId, + Set.of(ConfigType.SYNC, ConfigType.CHECK_CONNECTION_DESTINATION), Set.of(JobStatus.PENDING, JobStatus.SUCCEEDED)); + + final Job expectedDesiredJob1 = createJob(desiredJobId1, SYNC_JOB_CONFIG, JobStatus.SUCCEEDED, + Lists.newArrayList(createAttempt(0L, desiredJobId1, AttemptStatus.SUCCEEDED, LOG_PATH)), + NOW.getEpochSecond(), desiredConnectionId.toString()); + final Job expectedDesiredJob2 = + createJob(desiredJobId2, SYNC_JOB_CONFIG, JobStatus.PENDING, Lists.newArrayList(), NOW.getEpochSecond(), desiredConnectionId.toString()); + final Job expectedDesiredJob3 = createJob(desiredJobId3, CHECK_JOB_CONFIG, JobStatus.SUCCEEDED, + Lists.newArrayList(createAttempt(0L, desiredJobId3, AttemptStatus.SUCCEEDED, LOG_PATH)), + NOW.getEpochSecond(), desiredConnectionId.toString()); + final Job expectedDesiredJob4 = + createJob(desiredJobId4, CHECK_JOB_CONFIG, JobStatus.PENDING, Lists.newArrayList(), NOW.getEpochSecond(), desiredConnectionId.toString()); + + assertEquals(Sets.newHashSet(expectedDesiredJob1, expectedDesiredJob2, expectedDesiredJob3, expectedDesiredJob4), Sets.newHashSet(actualJobs)); + } + } @Nested diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/SchedulerHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/SchedulerHandler.java index b4f3ba95d2961..0a5bd696f81d3 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/SchedulerHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/SchedulerHandler.java @@ -71,7 +71,7 @@ import io.airbyte.server.handlers.helpers.CatalogConverter; import io.airbyte.validation.json.JsonSchemaValidator; import io.airbyte.validation.json.JsonValidationException; -import io.airbyte.workers.temporal.TemporalClient.ManualSyncSubmissionResult; +import io.airbyte.workers.temporal.TemporalClient.ManualOperationResult; import io.airbyte.workers.temporal.TemporalUtils; import io.temporal.api.common.v1.WorkflowExecution; import io.temporal.api.workflowservice.v1.RequestCancelWorkflowExecutionRequest; @@ -364,7 +364,7 @@ public DestinationDefinitionSpecificationRead getDestinationSpecification( public JobInfoRead syncConnection(final ConnectionIdRequestBody connectionIdRequestBody) throws ConfigNotFoundException, IOException, JsonValidationException { if (featureFlags.usesNewScheduler()) { - return createManualRun(connectionIdRequestBody.getConnectionId()); + return submitManualSyncToWorker(connectionIdRequestBody.getConnectionId()); } final UUID connectionId = connectionIdRequestBody.getConnectionId(); final StandardSync standardSync = configRepository.getStandardSync(connectionId); @@ -411,7 +411,7 @@ public JobInfoRead syncConnection(final ConnectionIdRequestBody connectionIdRequ public JobInfoRead resetConnection(final ConnectionIdRequestBody connectionIdRequestBody) throws IOException, JsonValidationException, ConfigNotFoundException { if (featureFlags.usesNewScheduler()) { - return resetConnectionWithNewScheduler(connectionIdRequestBody.getConnectionId()); + return submitResetConnectionToWorker(connectionIdRequestBody.getConnectionId()); } final UUID connectionId = connectionIdRequestBody.getConnectionId(); final StandardSync standardSync = configRepository.getStandardSync(connectionId); @@ -447,7 +447,7 @@ public ConnectionState getState(final ConnectionIdRequestBody connectionIdReques // todo (cgardens) - this method needs a test. 
public JobInfoRead cancelJob(final JobIdRequestBody jobIdRequestBody) throws IOException { if (featureFlags.usesNewScheduler()) { - return createNewSchedulerCancellation(jobIdRequestBody.getId()); + return submitCancellationToWorker(jobIdRequestBody.getId()); } final long jobId = jobIdRequestBody.getId(); @@ -509,39 +509,36 @@ private ConnectorSpecification getSpecFromDestinationDefinitionId(final UUID des return destinationDef.getSpec(); } - private JobInfoRead createNewSchedulerCancellation(final Long id) throws IOException { - final Job job = jobPersistence.getJob(id); - - final ManualSyncSubmissionResult cancellationSubmissionResult = eventRunner.startNewCancelation(UUID.fromString(job.getScope())); + private JobInfoRead submitCancellationToWorker(final Long jobId) throws IOException { + final Job job = jobPersistence.getJob(jobId); - if (cancellationSubmissionResult.getFailingReason().isPresent()) { - throw new IllegalStateException(cancellationSubmissionResult.getFailingReason().get()); + final ManualOperationResult cancellationResult = eventRunner.startNewCancellation(UUID.fromString(job.getScope())); + if (cancellationResult.getFailingReason().isPresent()) { + throw new IllegalStateException(cancellationResult.getFailingReason().get()); } - final Job cancelledJob = jobPersistence.getJob(id); - return jobConverter.getJobInfoRead(cancelledJob); + // query same job ID again to get updated job info after cancellation + return jobConverter.getJobInfoRead(jobPersistence.getJob(jobId)); } - private JobInfoRead createManualRun(final UUID connectionId) throws IOException { - final ManualSyncSubmissionResult manualSyncSubmissionResult = eventRunner.startNewManualSync(connectionId); + private JobInfoRead submitManualSyncToWorker(final UUID connectionId) throws IOException { + final ManualOperationResult manualSyncResult = eventRunner.startNewManualSync(connectionId); - if (manualSyncSubmissionResult.getFailingReason().isPresent()) { - throw new IllegalStateException(manualSyncSubmissionResult.getFailingReason().get()); - } + return readJobFromResult(manualSyncResult); + } - final Job job = jobPersistence.getJob(manualSyncSubmissionResult.getJobId().get()); + private JobInfoRead submitResetConnectionToWorker(final UUID connectionId) throws IOException { + final ManualOperationResult resetConnectionResult = eventRunner.resetConnection(connectionId); - return jobConverter.getJobInfoRead(job); + return readJobFromResult(resetConnectionResult); } - private JobInfoRead resetConnectionWithNewScheduler(final UUID connectionId) throws IOException { - final ManualSyncSubmissionResult manualSyncSubmissionResult = eventRunner.resetConnection(connectionId); - - if (manualSyncSubmissionResult.getFailingReason().isPresent()) { - throw new IllegalStateException(manualSyncSubmissionResult.getFailingReason().get()); + private JobInfoRead readJobFromResult(final ManualOperationResult manualOperationResult) throws IOException, IllegalStateException { + if (manualOperationResult.getFailingReason().isPresent()) { + throw new IllegalStateException(manualOperationResult.getFailingReason().get()); } - final Job job = jobPersistence.getJob(manualSyncSubmissionResult.getJobId().get()); + final Job job = jobPersistence.getJob(manualOperationResult.getJobId().get()); return jobConverter.getJobInfoRead(job); } diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/SchedulerHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/SchedulerHandlerTest.java index b45044993f72d..433071145b341 
100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/SchedulerHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/SchedulerHandlerTest.java @@ -85,7 +85,7 @@ import io.airbyte.server.helpers.SourceHelpers; import io.airbyte.validation.json.JsonSchemaValidator; import io.airbyte.validation.json.JsonValidationException; -import io.airbyte.workers.temporal.TemporalClient.ManualSyncSubmissionResult; +import io.airbyte.workers.temporal.TemporalClient.ManualOperationResult; import io.temporal.serviceclient.WorkflowServiceStubs; import java.io.IOException; import java.net.URI; @@ -720,14 +720,14 @@ void testNewSchedulerSync() throws JsonValidationException, ConfigNotFoundExcept final UUID connectionId = UUID.randomUUID(); final long jobId = 123L; - final ManualSyncSubmissionResult manualSyncSubmissionResult = ManualSyncSubmissionResult + final ManualOperationResult manualOperationResult = ManualOperationResult .builder() .failingReason(Optional.empty()) .jobId(Optional.of(jobId)) .build(); when(eventRunner.startNewManualSync(connectionId)) - .thenReturn(manualSyncSubmissionResult); + .thenReturn(manualOperationResult); doReturn(new JobInfoRead()) .when(jobConverter).getJobInfoRead(any()); diff --git a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AcceptanceTests.java b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AcceptanceTests.java index 030a83b8dd474..0638c6a58a70e 100644 --- a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AcceptanceTests.java +++ b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AcceptanceTests.java @@ -1193,6 +1193,7 @@ public void testDeleteConnection() throws Exception { waitWhileJobHasStatus(apiClient.getJobsApi(), connectionSyncRead.getJob(), Set.of(JobStatus.RUNNING)); // test normal deletion of connection + LOGGER.info("Calling delete connection..."); apiClient.getConnectionApi().deleteConnection(new ConnectionIdRequestBody().connectionId(connectionId)); // remove connection to avoid exception during tear down @@ -1205,6 +1206,10 @@ apiClient.getConnectionApi().getConnection(new ConnectionIdRequestBody().connectionId(connectionId)).getStatus(); assertEquals(ConnectionStatus.DEPRECATED, connectionStatus); + // test that repeated deletion call for same connection is successful + LOGGER.info("Calling delete connection a second time to test repeat call behavior..."); + apiClient.getConnectionApi().deleteConnection(new ConnectionIdRequestBody().connectionId(connectionId)); + // test deletion of connection when temporal workflow is in a bad state, only when using new // scheduler final FeatureFlags featureFlags = new EnvVariableFeatureFlags(); @@ -1268,6 +1273,100 @@ public void testUpdateConnectionWhenWorkflowUnreachable() throws Exception { } } + @Test + @Order(24) + public void testManualSyncRepairsWorkflowWhenWorkflowUnreachable() throws Exception { + // This test only covers the specific behavior of submitting a manual sync for a connection that + // does not have an underlying temporal workflow. + // This case only occurs with the new scheduler, so the entire test is inside the feature flag + // conditional. 
+ final FeatureFlags featureFlags = new EnvVariableFeatureFlags(); + if (featureFlags.usesNewScheduler()) { + final String connectionName = "test-connection"; + final SourceDefinitionRead sourceDefinition = createE2eSourceDefinition(); + final SourceRead source = createSource( + "E2E Test Source -" + UUID.randomUUID(), + workspaceId, + sourceDefinition.getSourceDefinitionId(), + Jsons.jsonNode(ImmutableMap.builder() + .put("type", "INFINITE_FEED") + .put("max_records", 5000) + .put("message_interval", 100) + .build())); + final UUID sourceId = source.getSourceId(); + final UUID destinationId = createDestination().getDestinationId(); + final UUID operationId = createOperation().getOperationId(); + final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final SyncMode syncMode = SyncMode.INCREMENTAL; + final DestinationSyncMode destinationSyncMode = DestinationSyncMode.APPEND_DEDUP; + catalog.getStreams().forEach(s -> s.getConfig() + .syncMode(syncMode) + .cursorField(List.of(COLUMN_ID)) + .destinationSyncMode(destinationSyncMode) + .primaryKey(List.of(List.of(COLUMN_NAME)))); + + LOGGER.info("Testing manual sync when temporal is in a terminal state"); + final UUID connectionId = createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); + + LOGGER.info("Starting first manual sync"); + final JobInfoRead firstJobInfo = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); + LOGGER.info("Terminating workflow during first sync"); + terminateTemporalWorkflow(connectionId); + + LOGGER.info("Submitted another manual sync"); + apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); + + LOGGER.info("Waiting for workflow to be recreated..."); + Thread.sleep(500); + + final WorkflowState workflowState = getWorkflowState(connectionId); + assertTrue(workflowState.isRunning()); + assertTrue(workflowState.isSkipScheduling()); + + // verify that the first manual sync was marked as failed + final JobInfoRead terminatedJobInfo = apiClient.getJobsApi().getJobInfo(new JobIdRequestBody().id(firstJobInfo.getJob().getId())); + assertEquals(JobStatus.FAILED, terminatedJobInfo.getJob().getStatus()); + } + } + + @Test + @Order(25) + public void testResetConnectionRepairsWorkflowWhenWorkflowUnreachable() throws Exception { + // This test only covers the specific behavior of resetting a connection that does not have an + // underlying temporal workflow. + // This case only occurs with the new scheduler, so the entire test is inside the feature flag + // conditional. 
+ final FeatureFlags featureFlags = new EnvVariableFeatureFlags(); + if (featureFlags.usesNewScheduler()) { + final String connectionName = "test-connection"; + final UUID sourceId = createPostgresSource().getSourceId(); + final UUID destinationId = createDestination().getDestinationId(); + final UUID operationId = createOperation().getOperationId(); + final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + final SyncMode syncMode = SyncMode.INCREMENTAL; + final DestinationSyncMode destinationSyncMode = DestinationSyncMode.APPEND_DEDUP; + catalog.getStreams().forEach(s -> s.getConfig() + .syncMode(syncMode) + .cursorField(List.of(COLUMN_ID)) + .destinationSyncMode(destinationSyncMode) + .primaryKey(List.of(List.of(COLUMN_NAME)))); + + LOGGER.info("Testing reset connection when temporal is in a terminal state"); + final UUID connectionId = createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, null).getConnectionId(); + + terminateTemporalWorkflow(connectionId); + + apiClient.getConnectionApi().resetConnection(new ConnectionIdRequestBody().connectionId(connectionId)); + + LOGGER.info("Waiting for workflow to be recreated..."); + Thread.sleep(500); + + final WorkflowState workflowState = getWorkflowState(connectionId); + assertTrue(workflowState.isRunning()); + assertTrue(workflowState.isResetConnection()); + } + } + private WorkflowClient getWorkflowClient() { final WorkflowServiceStubs temporalService = TemporalUtils.createTemporalService("localhost:7233"); return WorkflowClient.newInstance(temporalService); diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/helper/FailureHelper.java b/airbyte-workers/src/main/java/io/airbyte/workers/helper/FailureHelper.java index b01995e42cda7..644bcafff8e8b 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/helper/FailureHelper.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/helper/FailureHelper.java @@ -95,6 +95,18 @@ public static AttemptFailureSummary failureSummaryForCancellation(final Long job return failureSummary(failures, partialSuccess); } + public static AttemptFailureSummary failureSummaryForTemporalCleaningJobState(final Long jobId, final Integer attemptNumber) { + final FailureReason failureReason = new FailureReason() + .withFailureOrigin(FailureOrigin.AIRBYTE_PLATFORM) + .withFailureType(FailureType.SYSTEM_ERROR) + .withInternalMessage( + "Setting attempt to FAILED because the temporal workflow for this connection was restarted, and existing job state was cleaned.") + .withExternalMessage("An internal Airbyte error has occurred. This sync will need to be retried.") + .withTimestamp(System.currentTimeMillis()) + .withMetadata(jobAndAttemptMetadata(jobId, attemptNumber)); + return new AttemptFailureSummary().withFailures(List.of(failureReason)); + } + public static FailureReason failureReasonFromWorkflowAndActivity( final String workflowType, final String activityType, diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/ConnectionManagerUtils.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/ConnectionManagerUtils.java new file mode 100644 index 0000000000000..fcc152c76c196 --- /dev/null +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/ConnectionManagerUtils.java @@ -0,0 +1,194 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.workers.temporal; + +import io.airbyte.workers.temporal.exception.DeletedWorkflowException; +import io.airbyte.workers.temporal.exception.UnreachableWorkflowException; +import io.airbyte.workers.temporal.scheduling.ConnectionManagerWorkflow; +import io.airbyte.workers.temporal.scheduling.ConnectionManagerWorkflowImpl; +import io.airbyte.workers.temporal.scheduling.ConnectionUpdaterInput; +import io.airbyte.workers.temporal.scheduling.state.WorkflowState; +import io.temporal.client.BatchRequest; +import io.temporal.client.WorkflowClient; +import io.temporal.workflow.Functions.Proc; +import io.temporal.workflow.Functions.Proc1; +import io.temporal.workflow.Functions.TemporalFunctionalInterfaceMarker; +import java.util.Optional; +import java.util.UUID; +import java.util.function.Function; +import lombok.extern.slf4j.Slf4j; + +/** + * Encapsulates logic specific to retrieving, starting, and signaling the ConnectionManagerWorkflow. + */ +@Slf4j +public class ConnectionManagerUtils { + + /** + * Attempts to send a signal to the existing ConnectionManagerWorkflow for the provided connection. + * + * If the workflow is unreachable, this will restart the workflow and send the signal in a single + * batched request. Batching is used to avoid race conditions between starting the workflow and + * executing the signal. + * + * @param client the WorkflowClient for interacting with temporal + * @param connectionId the connection ID to execute this operation for + * @param signalMethod a function that takes in a connection manager workflow and executes a signal + * method on it, with no arguments + * @return the healthy connection manager workflow that was signaled + * @throws DeletedWorkflowException if the connection manager workflow was deleted + */ + static ConnectionManagerWorkflow signalWorkflowAndRepairIfNecessary(final WorkflowClient client, + final UUID connectionId, + final Function signalMethod) + throws DeletedWorkflowException { + return signalWorkflowAndRepairIfNecessary(client, connectionId, signalMethod, Optional.empty()); + } + + /** + * Attempts to send a signal to the existing ConnectionManagerWorkflow for the provided connection. + * + * If the workflow is unreachable, this will restart the workflow and send the signal in a single + * batched request. Batching is used to avoid race conditions between starting the workflow and + * executing the signal. + * + * @param client the WorkflowClient for interacting with temporal + * @param connectionId the connection ID to execute this operation for + * @param signalMethod a function that takes in a connection manager workflow and executes a signal + * method on it, with 1 argument + * @param signalArgument the single argument to be input to the signal + * @return the healthy connection manager workflow that was signaled + * @throws DeletedWorkflowException if the connection manager workflow was deleted + */ + static ConnectionManagerWorkflow signalWorkflowAndRepairIfNecessary(final WorkflowClient client, + final UUID connectionId, + final Function> signalMethod, + final T signalArgument) + throws DeletedWorkflowException { + return signalWorkflowAndRepairIfNecessary(client, connectionId, signalMethod, Optional.of(signalArgument)); + } + + // This method unifies the logic of the above two, by using the optional signalArgument parameter to + // indicate if an argument is being provided to the signal or not. 
+ // Keeping this private and only exposing the above methods outside this class provides a strict + // type enforcement for external calls, and means this method can assume consistent type + // implementations for both cases. + private static ConnectionManagerWorkflow signalWorkflowAndRepairIfNecessary(final WorkflowClient client, + final UUID connectionId, + final Function signalMethod, + final Optional signalArgument) + throws DeletedWorkflowException { + try { + final ConnectionManagerWorkflow connectionManagerWorkflow = getConnectionManagerWorkflow(client, connectionId); + log.info("Retrieved existing connection manager workflow for connection {}. Executing signal.", connectionId); + // retrieve the signal from the lambda + final TemporalFunctionalInterfaceMarker signal = signalMethod.apply(connectionManagerWorkflow); + // execute the signal + if (signalArgument.isPresent()) { + ((Proc1) signal).apply(signalArgument.get()); + } else { + ((Proc) signal).apply(); + } + return connectionManagerWorkflow; + } catch (final UnreachableWorkflowException e) { + log.error( + String.format( + "Failed to retrieve ConnectionManagerWorkflow for connection %s. Repairing state by creating new workflow and starting with the signal.", + connectionId), + e); + + final ConnectionManagerWorkflow connectionManagerWorkflow = newConnectionManagerWorkflowStub(client, connectionId); + final ConnectionUpdaterInput startWorkflowInput = buildStartWorkflowInput(connectionId); + + final BatchRequest batchRequest = client.newSignalWithStartRequest(); + batchRequest.add(connectionManagerWorkflow::run, startWorkflowInput); + + // retrieve the signal from the lambda + final TemporalFunctionalInterfaceMarker signal = signalMethod.apply(connectionManagerWorkflow); + // add signal to batch request + if (signalArgument.isPresent()) { + batchRequest.add((Proc1) signal, signalArgument.get()); + } else { + batchRequest.add((Proc) signal); + } + + client.signalWithStart(batchRequest); + log.info("Connection manager workflow for connection {} has been started and signaled.", connectionId); + + return connectionManagerWorkflow; + } + } + + static ConnectionManagerWorkflow startConnectionManagerNoSignal(final WorkflowClient client, final UUID connectionId) { + final ConnectionManagerWorkflow connectionManagerWorkflow = newConnectionManagerWorkflowStub(client, connectionId); + final ConnectionUpdaterInput input = buildStartWorkflowInput(connectionId); + + WorkflowClient.start(connectionManagerWorkflow::run, input); + + return connectionManagerWorkflow; + } + + /** + * Attempts to retrieve the connection manager workflow for the provided connection. 
+ * + * @param connectionId the ID of the connection whose workflow should be retrieved + * @return the healthy ConnectionManagerWorkflow + * @throws DeletedWorkflowException if the workflow was deleted, according to the workflow state + * @throws UnreachableWorkflowException if the workflow is unreachable + */ + static ConnectionManagerWorkflow getConnectionManagerWorkflow(final WorkflowClient client, final UUID connectionId) + throws DeletedWorkflowException, UnreachableWorkflowException { + final ConnectionManagerWorkflow connectionManagerWorkflow; + final WorkflowState workflowState; + try { + connectionManagerWorkflow = client.newWorkflowStub(ConnectionManagerWorkflow.class, getConnectionManagerName(connectionId)); + workflowState = connectionManagerWorkflow.getState(); + } catch (final Exception e) { + throw new UnreachableWorkflowException( + String.format("Failed to retrieve ConnectionManagerWorkflow for connection %s due to the following error:", connectionId), + e); + } + + if (workflowState.isDeleted()) { + throw new DeletedWorkflowException(String.format( + "The connection manager workflow for connection %s is deleted, so no further operations can be performed on it.", + connectionId)); + } + + return connectionManagerWorkflow; + } + + static long getCurrentJobId(final WorkflowClient client, final UUID connectionId) { + try { + final ConnectionManagerWorkflow connectionManagerWorkflow = ConnectionManagerUtils.getConnectionManagerWorkflow(client, connectionId); + return connectionManagerWorkflow.getJobInformation().getJobId(); + } catch (final Exception e) { + return ConnectionManagerWorkflowImpl.NON_RUNNING_JOB_ID; + } + } + + static ConnectionManagerWorkflow newConnectionManagerWorkflowStub(final WorkflowClient client, final UUID connectionId) { + return client.newWorkflowStub(ConnectionManagerWorkflow.class, + TemporalUtils.getWorkflowOptionsWithWorkflowId(TemporalJobType.CONNECTION_UPDATER, getConnectionManagerName(connectionId))); + } + + static String getConnectionManagerName(final UUID connectionId) { + return "connection_manager_" + connectionId; + } + + static ConnectionUpdaterInput buildStartWorkflowInput(final UUID connectionId) { + return ConnectionUpdaterInput.builder() + .connectionId(connectionId) + .jobId(null) + .attemptId(null) + .fromFailure(false) + .attemptNumber(1) + .workflowState(null) + .resetConnection(false) + .fromJobResetFailure(false) + .build(); + } + +} diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/TemporalClient.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/TemporalClient.java index e65920e6083fd..a8ced41f38391 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/TemporalClient.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/TemporalClient.java @@ -23,14 +23,14 @@ import io.airbyte.workers.WorkerUtils; import io.airbyte.workers.temporal.check.connection.CheckConnectionWorkflow; import io.airbyte.workers.temporal.discover.catalog.DiscoverCatalogWorkflow; +import io.airbyte.workers.temporal.exception.DeletedWorkflowException; +import io.airbyte.workers.temporal.exception.UnreachableWorkflowException; import io.airbyte.workers.temporal.scheduling.ConnectionManagerWorkflow; -import io.airbyte.workers.temporal.scheduling.ConnectionUpdaterInput; import io.airbyte.workers.temporal.scheduling.state.WorkflowState; import io.airbyte.workers.temporal.spec.SpecWorkflow; import io.airbyte.workers.temporal.sync.SyncWorkflow; import 
io.temporal.api.workflowservice.v1.ListOpenWorkflowExecutionsRequest; import io.temporal.api.workflowservice.v1.ListOpenWorkflowExecutionsResponse; -import io.temporal.client.BatchRequest; import io.temporal.client.WorkflowClient; import io.temporal.serviceclient.WorkflowServiceStubs; import java.nio.file.Path; @@ -164,7 +164,7 @@ public void migrateSyncIfNeeded(final Set connectionIds) { connectionIds.forEach((connectionId) -> { final StopWatch singleSyncMigrationWatch = new StopWatch(); singleSyncMigrationWatch.start(); - if (!isInRunningWorkflowCache(getConnectionManagerName(connectionId))) { + if (!isInRunningWorkflowCache(ConnectionManagerUtils.getConnectionManagerName(connectionId))) { log.info("Migrating: " + connectionId); try { submitConnectionUpdaterAsync(connectionId); @@ -214,32 +214,16 @@ void refreshRunningWorkflow() { } while (token != null && token.size() > 0); } - public void submitConnectionUpdaterAsync(final UUID connectionId) { + public ConnectionManagerWorkflow submitConnectionUpdaterAsync(final UUID connectionId) { log.info("Starting the scheduler temporal wf"); - final ConnectionManagerWorkflow connectionManagerWorkflow = getWorkflowOptionsWithWorkflowId(ConnectionManagerWorkflow.class, - TemporalJobType.CONNECTION_UPDATER, getConnectionManagerName(connectionId)); - final BatchRequest signalRequest = client.newSignalWithStartRequest(); - final ConnectionUpdaterInput input = ConnectionUpdaterInput.builder() - .connectionId(connectionId) - .jobId(null) - .attemptId(null) - .fromFailure(false) - .attemptNumber(1) - .workflowState(null) - .resetConnection(false) - .fromJobResetFailure(false) - .build(); - - signalRequest.add(connectionManagerWorkflow::run, input); - - WorkflowClient.start(connectionManagerWorkflow::run, input); + final ConnectionManagerWorkflow connectionManagerWorkflow = ConnectionManagerUtils.startConnectionManagerNoSignal(client, connectionId); try { CompletableFuture.supplyAsync(() -> { try { do { Thread.sleep(DELAY_BETWEEN_QUERY_MS); - } while (!isWorkflowReachable(getConnectionManagerName(connectionId))); + } while (!isWorkflowReachable(connectionId)); } catch (final InterruptedException e) {} return null; @@ -249,86 +233,72 @@ public void submitConnectionUpdaterAsync(final UUID connectionId) { } catch (final TimeoutException e) { log.error("Can't create a new connection manager workflow due to timeout", e); } + + return connectionManagerWorkflow; } public void deleteConnection(final UUID connectionId) { try { - final ConnectionManagerWorkflow connectionManagerWorkflow = getConnectionUpdateWorkflow(connectionId); - connectionManagerWorkflow.deleteConnection(); - } catch (final IllegalStateException e) { - log.info("Connection in an illegal state; Creating new workflow and sending delete signal"); - - final ConnectionManagerWorkflow connectionManagerWorkflow = getWorkflowOptionsWithWorkflowId(ConnectionManagerWorkflow.class, - TemporalJobType.CONNECTION_UPDATER, getConnectionManagerName(connectionId)); - - final ConnectionUpdaterInput input = ConnectionUpdaterInput.builder() - .connectionId(connectionId) - .jobId(null) - .attemptId(null) - .fromFailure(false) - .attemptNumber(1) - .workflowState(null) - .resetConnection(false) - .fromJobResetFailure(false) - .build(); - - final BatchRequest signalRequest = client.newSignalWithStartRequest(); - signalRequest.add(connectionManagerWorkflow::run, input); - signalRequest.add(connectionManagerWorkflow::deleteConnection); - client.signalWithStart(signalRequest); - log.info("New start request and delete 
signal submitted"); + ConnectionManagerUtils.signalWorkflowAndRepairIfNecessary(client, connectionId, + connectionManagerWorkflow -> connectionManagerWorkflow::deleteConnection); + } catch (final DeletedWorkflowException e) { + log.info("Connection {} has already been deleted.", connectionId); } } public void update(final UUID connectionId) { - final boolean workflowReachable = isWorkflowReachable(getConnectionManagerName(connectionId)); - - if (!workflowReachable) { - // if a workflow is not reachable for update, create a new workflow + final ConnectionManagerWorkflow connectionManagerWorkflow; + try { + connectionManagerWorkflow = ConnectionManagerUtils.getConnectionManagerWorkflow(client, connectionId); + } catch (final DeletedWorkflowException e) { + log.info("Connection {} is deleted, and therefore cannot be updated.", connectionId); + return; + } catch (final UnreachableWorkflowException e) { + log.error( + String.format("Failed to retrieve ConnectionManagerWorkflow for connection %s. Repairing state by creating new workflow.", connectionId), + e); submitConnectionUpdaterAsync(connectionId); - } else { - final ConnectionManagerWorkflow connectionManagerWorkflow = getConnectionUpdateWorkflow(connectionId); - connectionManagerWorkflow.connectionUpdated(); + return; } + + connectionManagerWorkflow.connectionUpdated(); } @Value @Builder - public static class ManualSyncSubmissionResult { + public static class ManualOperationResult { final Optional failingReason; final Optional jobId; } - public ManualSyncSubmissionResult startNewManualSync(final UUID connectionId) { + public ManualOperationResult startNewManualSync(final UUID connectionId) { log.info("Manual sync request"); - final boolean workflowReachable = isWorkflowReachable(getConnectionManagerName(connectionId)); - - if (!workflowReachable) { - return new ManualSyncSubmissionResult( - Optional.of("No scheduler workflow is reachable for: " + connectionId), - Optional.empty()); - } - - final ConnectionManagerWorkflow connectionManagerWorkflow = - getExistingWorkflow(ConnectionManagerWorkflow.class, getConnectionManagerName(connectionId)); - final WorkflowState workflowState = connectionManagerWorkflow.getState(); - if (workflowState.isRunning()) { + if (isWorkflowStateRunning(connectionId)) { // TODO Bmoric: Error is running - return new ManualSyncSubmissionResult( + return new ManualOperationResult( Optional.of("A sync is already running for: " + connectionId), Optional.empty()); } - connectionManagerWorkflow.submitManualSync(); + final ConnectionManagerWorkflow connectionManagerWorkflow; + try { + connectionManagerWorkflow = + ConnectionManagerUtils.signalWorkflowAndRepairIfNecessary(client, connectionId, workflow -> workflow::submitManualSync); + } catch (final DeletedWorkflowException e) { + log.error("Can't sync a deleted connection.", e); + return new ManualOperationResult( + Optional.of(e.getMessage()), + Optional.empty()); + } do { try { Thread.sleep(DELAY_BETWEEN_QUERY_MS); } catch (final InterruptedException e) { - return new ManualSyncSubmissionResult( + return new ManualOperationResult( Optional.of("Didn't managed to start a sync for: " + connectionId), Optional.empty()); } @@ -338,79 +308,64 @@ public ManualSyncSubmissionResult startNewManualSync(final UUID connectionId) { final long jobId = connectionManagerWorkflow.getJobInformation().getJobId(); - return new ManualSyncSubmissionResult( + return new ManualOperationResult( Optional.empty(), Optional.of(jobId)); } - @Value - public class NewCancellationSubmissionResult { 
- - final Optional failingReason; - final Optional jobId; + public ManualOperationResult startNewCancellation(final UUID connectionId) { + log.info("Manual cancellation request"); - } + final long jobId = ConnectionManagerUtils.getCurrentJobId(client, connectionId); - public ManualSyncSubmissionResult startNewCancelation(final UUID connectionId) { - log.info("Manual sync request"); - - final boolean workflowReachable = isWorkflowReachable(getConnectionManagerName(connectionId)); - - if (!workflowReachable) { - log.error("Can't cancel a non running workflow"); - return new ManualSyncSubmissionResult( - Optional.of("No scheduler workflow is reachable for: " + connectionId), + try { + ConnectionManagerUtils.signalWorkflowAndRepairIfNecessary(client, connectionId, workflow -> workflow::cancelJob); + } catch (final DeletedWorkflowException e) { + log.error("Can't cancel a deleted workflow", e); + return new ManualOperationResult( + Optional.of(e.getMessage()), Optional.empty()); } - final ConnectionManagerWorkflow connectionManagerWorkflow = - getExistingWorkflow(ConnectionManagerWorkflow.class, getConnectionManagerName(connectionId)); - - connectionManagerWorkflow.cancelJob(); - do { try { Thread.sleep(DELAY_BETWEEN_QUERY_MS); } catch (final InterruptedException e) { - return new ManualSyncSubmissionResult( - Optional.of("Didn't manage cancel a sync for: " + connectionId), + return new ManualOperationResult( + Optional.of("Didn't manage to cancel a sync for: " + connectionId), Optional.empty()); } - } while (isWorkflowStateRunning(getConnectionManagerName(connectionId))); + } while (isWorkflowStateRunning(connectionId)); log.info("end of manual cancellation"); - final long jobId = connectionManagerWorkflow.getJobInformation().getJobId(); - - return new ManualSyncSubmissionResult( + return new ManualOperationResult( Optional.empty(), Optional.of(jobId)); } - public ManualSyncSubmissionResult resetConnection(final UUID connectionId) { + public ManualOperationResult resetConnection(final UUID connectionId) { log.info("reset sync request"); - final boolean workflowReachable = isWorkflowReachable(getConnectionManagerName(connectionId)); + // get the job ID before the reset, defaulting to NON_RUNNING_JOB_ID if workflow is unreachable + final long oldJobId = ConnectionManagerUtils.getCurrentJobId(client, connectionId); - if (!workflowReachable) { - log.error("Can't reset a non-reachable workflow"); - return new ManualSyncSubmissionResult( - Optional.of("No scheduler workflow is reachable for: " + connectionId), + final ConnectionManagerWorkflow connectionManagerWorkflow; + try { + connectionManagerWorkflow = + ConnectionManagerUtils.signalWorkflowAndRepairIfNecessary(client, connectionId, workflow -> workflow::resetConnection); + } catch (final DeletedWorkflowException e) { + log.error("Can't reset a deleted workflow", e); + return new ManualOperationResult( + Optional.of(e.getMessage()), Optional.empty()); } - final ConnectionManagerWorkflow connectionManagerWorkflow = - getExistingWorkflow(ConnectionManagerWorkflow.class, getConnectionManagerName(connectionId)); - - final long oldJobId = connectionManagerWorkflow.getJobInformation().getJobId(); - - connectionManagerWorkflow.resetConnection(); - do { try { Thread.sleep(DELAY_BETWEEN_QUERY_MS); } catch (final InterruptedException e) { - return new ManualSyncSubmissionResult( + return new ManualOperationResult( Optional.of("Didn't manage to reset a sync for: " + connectionId), Optional.empty()); } @@ -420,7 +375,7 @@ public ManualSyncSubmissionResult 
resetConnection(final UUID connectionId) { final long jobId = connectionManagerWorkflow.getJobInformation().getJobId(); - return new ManualSyncSubmissionResult( + return new ManualOperationResult( Optional.empty(), Optional.of(jobId)); } @@ -431,14 +386,21 @@ public ManualSyncSubmissionResult resetConnection(final UUID connectionId) { * The way to do so is to wait for the jobId to change, either to a new job id or the default id * that signal that a workflow is waiting to be submitted */ - public ManualSyncSubmissionResult synchronousResetConnection(final UUID connectionId) { - final ManualSyncSubmissionResult resetResult = resetConnection(connectionId); + public ManualOperationResult synchronousResetConnection(final UUID connectionId) { + final ManualOperationResult resetResult = resetConnection(connectionId); if (resetResult.getFailingReason().isPresent()) { return resetResult; } - final ConnectionManagerWorkflow connectionManagerWorkflow = - getExistingWorkflow(ConnectionManagerWorkflow.class, getConnectionManagerName(connectionId)); + final ConnectionManagerWorkflow connectionManagerWorkflow; + try { + connectionManagerWorkflow = ConnectionManagerUtils.getConnectionManagerWorkflow(client, connectionId); + } catch (final Exception e) { + log.error("Encountered exception retrieving workflow after reset.", e); + return new ManualOperationResult( + Optional.of(e.getMessage()), + Optional.empty()); + } final long oldJobId = connectionManagerWorkflow.getJobInformation().getJobId(); @@ -446,7 +408,7 @@ public ManualSyncSubmissionResult synchronousResetConnection(final UUID connecti try { Thread.sleep(DELAY_BETWEEN_QUERY_MS); } catch (final InterruptedException e) { - return new ManualSyncSubmissionResult( + return new ManualOperationResult( Optional.of("Didn't manage to reset a sync for: " + connectionId), Optional.empty()); } @@ -456,7 +418,7 @@ public ManualSyncSubmissionResult synchronousResetConnection(final UUID connecti final long jobId = connectionManagerWorkflow.getJobInformation().getJobId(); - return new ManualSyncSubmissionResult( + return new ManualOperationResult( Optional.empty(), Optional.of(jobId)); } @@ -469,23 +431,6 @@ private T getWorkflowOptionsWithWorkflowId(final Class workflowClass, fin return client.newWorkflowStub(workflowClass, TemporalUtils.getWorkflowOptionsWithWorkflowId(jobType, name)); } - private T getExistingWorkflow(final Class workflowClass, final String name) { - return client.newWorkflowStub(workflowClass, name); - } - - ConnectionManagerWorkflow getConnectionUpdateWorkflow(final UUID connectionId) { - final boolean workflowReachable = isWorkflowReachable(getConnectionManagerName(connectionId)); - - if (!workflowReachable) { - throw new IllegalStateException("No reachable workflow for the connection {} while trying to delete it"); - } - - final ConnectionManagerWorkflow connectionManagerWorkflow = - getExistingWorkflow(ConnectionManagerWorkflow.class, getConnectionManagerName(connectionId)); - - return connectionManagerWorkflow; - } - @VisibleForTesting TemporalResponse execute(final JobRunConfig jobRunConfig, final Supplier executor) { final Path jobRoot = WorkerUtils.getJobRoot(workspaceRoot, jobRunConfig); @@ -506,14 +451,12 @@ TemporalResponse execute(final JobRunConfig jobRunConfig, final Supplier< /** * Check if a workflow is reachable for signal calls by attempting to query for current state. If - * the query succeeds, the workflow is reachable. + * the query succeeds, and the workflow is not marked as deleted, the workflow is reachable. 
*/ @VisibleForTesting - boolean isWorkflowReachable(final String workflowName) { + boolean isWorkflowReachable(final UUID connectionId) { try { - final ConnectionManagerWorkflow connectionManagerWorkflow = getExistingWorkflow(ConnectionManagerWorkflow.class, workflowName); - connectionManagerWorkflow.getState(); - + ConnectionManagerUtils.getConnectionManagerWorkflow(client, connectionId); return true; } catch (final Exception e) { return false; @@ -524,9 +467,9 @@ boolean isWorkflowReachable(final String workflowName) { * Check if a workflow is reachable and has state {@link WorkflowState#isRunning()} */ @VisibleForTesting - boolean isWorkflowStateRunning(final String workflowName) { + boolean isWorkflowStateRunning(final UUID connectionId) { try { - final ConnectionManagerWorkflow connectionManagerWorkflow = getExistingWorkflow(ConnectionManagerWorkflow.class, workflowName); + final ConnectionManagerWorkflow connectionManagerWorkflow = ConnectionManagerUtils.getConnectionManagerWorkflow(client, connectionId); return connectionManagerWorkflow.getState().isRunning(); } catch (final Exception e) { @@ -534,8 +477,4 @@ boolean isWorkflowStateRunning(final String workflowName) { } } - static String getConnectionManagerName(final UUID connectionId) { - return "connection_manager_" + connectionId; - } - } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/exception/DeletedWorkflowException.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/exception/DeletedWorkflowException.java new file mode 100644 index 0000000000000..234dcfac612db --- /dev/null +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/exception/DeletedWorkflowException.java @@ -0,0 +1,13 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.workers.temporal.exception; + +public class DeletedWorkflowException extends Exception { + + public DeletedWorkflowException(final String message) { + super(message); + } + +} diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/exception/UnreachableWorkflowException.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/exception/UnreachableWorkflowException.java new file mode 100644 index 0000000000000..8ae77e3e50410 --- /dev/null +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/exception/UnreachableWorkflowException.java @@ -0,0 +1,13 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.workers.temporal.exception; + +public class UnreachableWorkflowException extends Exception { + + public UnreachableWorkflowException(final String message, final Throwable t) { + super(message, t); + } + +} diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java index 96b31fdc2e1b5..310efd5db39a1 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java @@ -29,6 +29,7 @@ import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptFailureInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberCreationOutput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberFailureInput; +import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.EnsureCleanJobStateInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInputWithAttemptNumber; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationInput; @@ -69,6 +70,9 @@ public class ConnectionManagerWorkflowImpl implements ConnectionManagerWorkflow private static final String RENAME_ATTEMPT_ID_TO_NUMBER_TAG = "rename_attempt_id_to_number"; private static final int RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION = 1; + private static final String ENSURE_CLEAN_JOB_STATE = "ensure_clean_job_state"; + private static final int ENSURE_CLEAN_JOB_STATE_CURRENT_VERSION = 1; + private WorkflowState workflowState = new WorkflowState(UUID.randomUUID(), new NoopStateListener()); private final WorkflowInternalState workflowInternalState = new WorkflowInternalState(); @@ -128,6 +132,11 @@ private CancellationScope generateSyncWorkflowRunnable(final ConnectionUpdaterIn return Workflow.newCancellationScope(() -> { connectionId = connectionUpdaterInput.getConnectionId(); + // Clean the job state by failing any jobs for this connection that are currently non-terminal. + // This catches cases where the temporal workflow was terminated and restarted while a job was + // actively running, leaving that job in an orphaned and non-terminal state. + ensureCleanJobState(connectionUpdaterInput); + // workflow state is only ever set in test cases. for production cases, it will always be null. if (connectionUpdaterInput.getWorkflowState() != null) { workflowState = connectionUpdaterInput.getWorkflowState(); @@ -136,9 +145,12 @@ private CancellationScope generateSyncWorkflowRunnable(final ConnectionUpdaterIn // when a reset is triggered, the previous attempt, cancels itself (unless it is already a reset, in // which case it does nothing). 
the previous run that cancels itself then passes on the // resetConnection flag to the next run so that that run can execute the actual reset - workflowState.setResetConnection(connectionUpdaterInput.isResetConnection()); - - workflowState.setResetWithScheduling(connectionUpdaterInput.isFromJobResetFailure()); + if (connectionUpdaterInput.isResetConnection()) { + workflowState.setResetConnection(true); + } + if (connectionUpdaterInput.isFromJobResetFailure()) { + workflowState.setResetWithScheduling(true); + } final Duration timeToWait = getTimeToWait(connectionUpdaterInput.getConnectionId()); @@ -432,6 +444,23 @@ private Duration getTimeToWait(final UUID connectionId) { return scheduleRetrieverOutput.getTimeToWait(); } + private void ensureCleanJobState(final ConnectionUpdaterInput connectionUpdaterInput) { + final int ensureCleanJobStateVersion = + Workflow.getVersion(ENSURE_CLEAN_JOB_STATE, Workflow.DEFAULT_VERSION, ENSURE_CLEAN_JOB_STATE_CURRENT_VERSION); + + // For backwards compatibility and determinism, skip if workflow existed before this change + if (ensureCleanJobStateVersion < ENSURE_CLEAN_JOB_STATE_CURRENT_VERSION) { + return; + } + + if (connectionUpdaterInput.getJobId() != null) { + log.info("This workflow is already attached to a job, so no need to clean job state."); + return; + } + + runMandatoryActivity(jobCreationAndStatusUpdateActivity::ensureCleanJobState, new EnsureCleanJobStateInput(connectionId)); + } + /** * Creates a new job if it is not present in the input. If the jobId is specified in the input of * the connectionManagerWorkflow, we will return it. Otherwise we will create a job and return its diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java index 2de587c2a15bd..87c0b9ef488cf 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java @@ -222,4 +222,16 @@ class ReportJobStartInput { @ActivityMethod void reportJobStart(ReportJobStartInput reportJobStartInput); + @Data + @NoArgsConstructor + @AllArgsConstructor + class EnsureCleanJobStateInput { + + private UUID connectionId; + + } + + @ActivityMethod + void ensureCleanJobState(EnsureCleanJobStateInput input); + } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java index 84643c243c60d..2f7cf4dec7951 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java @@ -23,6 +23,7 @@ import io.airbyte.metrics.lib.DogStatsDMetricSingleton; import io.airbyte.metrics.lib.MetricTags; import io.airbyte.metrics.lib.OssMetricsRegistry; +import io.airbyte.scheduler.models.Attempt; import io.airbyte.scheduler.models.Job; import io.airbyte.scheduler.persistence.JobCreator; import io.airbyte.scheduler.persistence.JobNotifier; @@ -32,6 +33,7 @@ import io.airbyte.scheduler.persistence.job_tracker.JobTracker.JobState; import 
io.airbyte.validation.json.JsonValidationException; import io.airbyte.workers.JobStatus; +import io.airbyte.workers.helper.FailureHelper; import io.airbyte.workers.temporal.exception.RetryableException; import io.airbyte.workers.worker_run.TemporalWorkerRunFactory; import io.airbyte.workers.worker_run.WorkerRun; @@ -60,6 +62,12 @@ public class JobCreationAndStatusUpdateActivityImpl implements JobCreationAndSta @Override public JobCreationOutput createNewJob(final JobCreationInput input) { try { + // Fail non-terminal jobs first to prevent this activity from repeatedly trying to create a new job + // and failing, potentially resulting in the workflow ending up in a quarantined state. + // Another non-terminal job is not expected to exist at this point in the normal case, but this + // could happen in special edge cases for example when migrating to this from the old scheduler. + failNonTerminalJobs(input.getConnectionId()); + final StandardSync standardSync = configRepository.getStandardSync(input.getConnectionId()); if (input.isReset()) { @@ -262,6 +270,42 @@ public void reportJobStart(final ReportJobStartInput input) { } } + @Override + public void ensureCleanJobState(final EnsureCleanJobStateInput input) { + failNonTerminalJobs(input.getConnectionId()); + } + + private void failNonTerminalJobs(final UUID connectionId) { + try { + final List jobs = jobPersistence.listJobsForConnectionWithStatuses(connectionId, Job.REPLICATION_TYPES, + io.airbyte.scheduler.models.JobStatus.NON_TERMINAL_STATUSES); + for (final Job job : jobs) { + final long jobId = job.getId(); + log.info("Failing non-terminal job {}", jobId); + jobPersistence.failJob(jobId); + + // fail all non-terminal attempts + for (final Attempt attempt : job.getAttempts()) { + if (Attempt.isAttemptInTerminalState(attempt)) { + continue; + } + + // the Attempt object 'id' is actually the value of the attempt_number column in the db + final int attemptNumber = (int) attempt.getId(); + jobPersistence.failAttempt(jobId, attemptNumber); + jobPersistence.writeAttemptFailureSummary(jobId, attemptNumber, + FailureHelper.failureSummaryForTemporalCleaningJobState(jobId, attemptNumber)); + } + + final Job failedJob = jobPersistence.getJob(jobId); + jobNotifier.failJob("Failing job in order to start from clean job state for new temporal workflow run.", failedJob); + trackCompletion(failedJob, JobStatus.FAILED); + } + } catch (final IOException e) { + throw new RetryableException(e); + } + } + private void emitJobIdToReleaseStagesMetric(final OssMetricsRegistry metric, final long jobId) throws IOException { final var releaseStages = configRepository.getJobIdToReleaseStages(jobId); if (releaseStages == null || releaseStages.size() == 0) { diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/TemporalClientTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/TemporalClientTest.java index 8292744b3bc52..bd42a178d8773 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/TemporalClientTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/TemporalClientTest.java @@ -12,11 +12,11 @@ import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static 
org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import com.google.common.collect.Sets; @@ -33,11 +33,12 @@ import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.scheduler.models.IntegrationLauncherConfig; import io.airbyte.scheduler.models.JobRunConfig; -import io.airbyte.workers.temporal.TemporalClient.ManualSyncSubmissionResult; +import io.airbyte.workers.temporal.TemporalClient.ManualOperationResult; import io.airbyte.workers.temporal.check.connection.CheckConnectionWorkflow; import io.airbyte.workers.temporal.discover.catalog.DiscoverCatalogWorkflow; import io.airbyte.workers.temporal.scheduling.ConnectionManagerWorkflow; import io.airbyte.workers.temporal.scheduling.ConnectionManagerWorkflow.JobInformation; +import io.airbyte.workers.temporal.scheduling.ConnectionManagerWorkflowImpl; import io.airbyte.workers.temporal.scheduling.state.WorkflowState; import io.airbyte.workers.temporal.spec.SpecWorkflow; import io.airbyte.workers.temporal.sync.SyncWorkflow; @@ -45,6 +46,7 @@ import io.temporal.client.WorkflowClient; import io.temporal.client.WorkflowOptions; import io.temporal.serviceclient.WorkflowServiceStubs; +import io.temporal.workflow.Functions.Proc; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -55,6 +57,7 @@ import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; import org.mockito.Mockito; class TemporalClientTest { @@ -209,6 +212,9 @@ void testSubmitSync() { @Test public void testSynchronousResetConnection() { final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + final WorkflowState mWorkflowState = mock(WorkflowState.class); + when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); + when(mWorkflowState.isDeleted()).thenReturn(false); final long jobId1 = 1L; final long jobId2 = 2L; final long jobId3 = 3L; @@ -221,15 +227,15 @@ public void testSynchronousResetConnection() { new JobInformation(jobId3, 0), new JobInformation(jobId3, 0)); - doReturn(true).when(temporalClient).isWorkflowReachable(anyString()); + doReturn(true).when(temporalClient).isWorkflowReachable(any(UUID.class)); when(workflowClient.newWorkflowStub(any(Class.class), anyString())).thenReturn(mConnectionManagerWorkflow); - final ManualSyncSubmissionResult manualSyncSubmissionResult = temporalClient.synchronousResetConnection(CONNECTION_ID); + final ManualOperationResult manualOperationResult = temporalClient.synchronousResetConnection(CONNECTION_ID); verify(mConnectionManagerWorkflow).resetConnection(); - assertEquals(manualSyncSubmissionResult.getJobId().get(), jobId3); + assertEquals(manualOperationResult.getJobId().get(), jobId3); } } @@ -245,13 +251,14 @@ public void migrateCalled() { final UUID migratedId = UUID.randomUUID(); doReturn(false) - .when(temporalClient).isInRunningWorkflowCache(TemporalClient.getConnectionManagerName(nonMigratedId)); + .when(temporalClient).isInRunningWorkflowCache(ConnectionManagerUtils.getConnectionManagerName(nonMigratedId)); doReturn(true) - .when(temporalClient).isInRunningWorkflowCache(TemporalClient.getConnectionManagerName(migratedId)); + .when(temporalClient).isInRunningWorkflowCache(ConnectionManagerUtils.getConnectionManagerName(migratedId)); doNothing() .when(temporalClient).refreshRunningWorkflow(); - doNothing() + final ConnectionManagerWorkflow mConnectionManagerWorkflow = 
mock(ConnectionManagerWorkflow.class); + doReturn(mConnectionManagerWorkflow) .when(temporalClient).submitConnectionUpdaterAsync(nonMigratedId); temporalClient.migrateSyncIfNeeded(Sets.newHashSet(nonMigratedId, migratedId)); @@ -268,11 +275,14 @@ class DeleteConnection { @Test @SuppressWarnings("unchecked") - @DisplayName("Test delete connection method.") + @DisplayName("Test delete connection method when workflow is in a running state.") void testDeleteConnection() { final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + final WorkflowState mWorkflowState = mock(WorkflowState.class); + when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); + when(mWorkflowState.isDeleted()).thenReturn(false); - doReturn(true).when(temporalClient).isWorkflowReachable(anyString()); + doReturn(true).when(temporalClient).isWorkflowReachable(any(UUID.class)); when(workflowClient.newWorkflowStub(any(Class.class), anyString())).thenReturn(mConnectionManagerWorkflow); final JobSyncConfig syncConfig = new JobSyncConfig() @@ -294,20 +304,43 @@ void testDeleteConnection() { @SuppressWarnings("unchecked") @DisplayName("Test delete connection method when workflow is in an unexpected state") void testDeleteConnectionInUnexpectedState() { - final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final BatchRequest mBatchRequest = mock(BatchRequest.class); + final ConnectionManagerWorkflow mTerminatedConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + when(mTerminatedConnectionManagerWorkflow.getState()) + .thenThrow(new IllegalStateException("Force state exception to simulate workflow not running")); + when(workflowClient.newWorkflowStub(any(Class.class), any(String.class))).thenReturn(mTerminatedConnectionManagerWorkflow); - doThrow(new IllegalStateException("Force illegal state")).when(temporalClient).getConnectionUpdateWorkflow(CONNECTION_ID); - when(workflowClient.newWorkflowStub(any(Class.class), any(WorkflowOptions.class))).thenReturn(mConnectionManagerWorkflow); + final ConnectionManagerWorkflow mNewConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + when(workflowClient.newWorkflowStub(any(Class.class), any(WorkflowOptions.class))).thenReturn(mNewConnectionManagerWorkflow); + final BatchRequest mBatchRequest = mock(BatchRequest.class); when(workflowClient.newSignalWithStartRequest()).thenReturn(mBatchRequest); temporalClient.deleteConnection(CONNECTION_ID); + verify(workflowClient).signalWithStart(mBatchRequest); - // this is only called when getting existing workflow - verify(workflowClient, Mockito.never()).newWorkflowStub(any(), anyString()); + // Verify that the deleteConnection signal was passed to the batch request by capturing the + // argument, + // executing the signal, and verifying that the desired signal was executed + final ArgumentCaptor batchRequestAddArgCaptor = ArgumentCaptor.forClass(Proc.class); + verify(mBatchRequest).add(batchRequestAddArgCaptor.capture()); + final Proc signal = batchRequestAddArgCaptor.getValue(); + signal.apply(); + verify(mNewConnectionManagerWorkflow).deleteConnection(); + } - verify(workflowClient).newSignalWithStartRequest(); - verify(workflowClient).signalWithStart(mBatchRequest); + @Test + @SuppressWarnings("unchecked") + @DisplayName("Test delete connection method when workflow has already been deleted") + void testDeleteConnectionOnDeletedWorkflow() { + final ConnectionManagerWorkflow mConnectionManagerWorkflow = 
mock(ConnectionManagerWorkflow.class); + final WorkflowState mWorkflowState = mock(WorkflowState.class); + when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); + when(mWorkflowState.isDeleted()).thenReturn(true); + when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); + + temporalClient.deleteConnection(CONNECTION_ID); + + verify(temporalClient).deleteConnection(CONNECTION_ID); + verifyNoMoreInteractions(temporalClient); } } @@ -324,6 +357,7 @@ void testUpdateConnection() { final WorkflowState mWorkflowState = mock(WorkflowState.class); when(mWorkflowState.isRunning()).thenReturn(true); + when(mWorkflowState.isDeleted()).thenReturn(false); when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); when(workflowClient.newWorkflowStub(any(Class.class), any(String.class))).thenReturn(mConnectionManagerWorkflow); @@ -337,13 +371,10 @@ void testUpdateConnection() { @DisplayName("Test update connection method starts a new workflow when workflow is in an unexpected state") void testUpdateConnectionInUnexpectedState() { final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final BatchRequest mBatchRequest = mock(BatchRequest.class); when(mConnectionManagerWorkflow.getState()).thenThrow(new IllegalStateException("Force state exception to simulate workflow not running")); when(workflowClient.newWorkflowStub(any(Class.class), any(String.class))).thenReturn(mConnectionManagerWorkflow); - when(workflowClient.newWorkflowStub(any(Class.class), any(WorkflowOptions.class))).thenReturn(mConnectionManagerWorkflow); - when(workflowClient.newSignalWithStartRequest()).thenReturn(mBatchRequest); - doNothing().when(temporalClient).submitConnectionUpdaterAsync(CONNECTION_ID); + doReturn(mConnectionManagerWorkflow).when(temporalClient).submitConnectionUpdaterAsync(CONNECTION_ID); temporalClient.update(CONNECTION_ID); @@ -353,6 +384,287 @@ void testUpdateConnectionInUnexpectedState() { verify(temporalClient, Mockito.times(1)).submitConnectionUpdaterAsync(CONNECTION_ID); } + @Test + @SuppressWarnings("unchecked") + @DisplayName("Test update connection method does nothing when connection is deleted") + void testUpdateConnectionDeletedWorkflow() { + final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + final WorkflowState mWorkflowState = mock(WorkflowState.class); + when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); + when(mWorkflowState.isDeleted()).thenReturn(true); + when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); + + temporalClient.update(CONNECTION_ID); + + // this is only called when updating an existing workflow + verify(mConnectionManagerWorkflow, Mockito.never()).connectionUpdated(); + verify(temporalClient).update(CONNECTION_ID); + verifyNoMoreInteractions(temporalClient); + } + + } + + @Nested + @DisplayName("Test manual sync behavior") + class ManualSync { + + @Test + @DisplayName("Test startNewManualSync successful") + void testStartNewManualSyncSuccess() { + final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + final WorkflowState mWorkflowState = mock(WorkflowState.class); + when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); + when(mWorkflowState.isDeleted()).thenReturn(false); + when(mWorkflowState.isRunning()).thenReturn(false).thenReturn(true); + 
when(mConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); + when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); + + final ManualOperationResult result = temporalClient.startNewManualSync(CONNECTION_ID); + + assertTrue(result.getJobId().isPresent()); + assertEquals(JOB_ID, result.getJobId().get()); + assertFalse(result.getFailingReason().isPresent()); + verify(mConnectionManagerWorkflow).submitManualSync(); + } + + @Test + @DisplayName("Test startNewManualSync fails if job is already running") + void testStartNewManualSyncAlreadyRunning() { + final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + final WorkflowState mWorkflowState = mock(WorkflowState.class); + when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); + when(mWorkflowState.isDeleted()).thenReturn(false); + when(mWorkflowState.isRunning()).thenReturn(true); + when(mConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); + when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); + + final ManualOperationResult result = temporalClient.startNewManualSync(CONNECTION_ID); + + assertFalse(result.getJobId().isPresent()); + assertTrue(result.getFailingReason().isPresent()); + verify(mConnectionManagerWorkflow, times(0)).submitManualSync(); + } + + @Test + @DisplayName("Test startNewManualSync repairs the workflow if it is in a bad state") + void testStartNewManualSyncRepairsBadWorkflowState() { + final ConnectionManagerWorkflow mTerminatedConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + when(mTerminatedConnectionManagerWorkflow.getState()) + .thenThrow(new IllegalStateException("Force state exception to simulate workflow not running")); + when(mTerminatedConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); + when(workflowClient.newWorkflowStub(any(Class.class), any(String.class))).thenReturn(mTerminatedConnectionManagerWorkflow); + + final ConnectionManagerWorkflow mNewConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + final WorkflowState mWorkflowState = mock(WorkflowState.class); + when(mNewConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); + when(mWorkflowState.isDeleted()).thenReturn(false); + when(mWorkflowState.isRunning()).thenReturn(false).thenReturn(true); + when(mNewConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); + when(workflowClient.newWorkflowStub(any(Class.class), any(WorkflowOptions.class))).thenReturn(mNewConnectionManagerWorkflow); + final BatchRequest mBatchRequest = mock(BatchRequest.class); + when(workflowClient.newSignalWithStartRequest()).thenReturn(mBatchRequest); + + final ManualOperationResult result = temporalClient.startNewManualSync(CONNECTION_ID); + + assertTrue(result.getJobId().isPresent()); + assertEquals(JOB_ID, result.getJobId().get()); + assertFalse(result.getFailingReason().isPresent()); + verify(workflowClient).signalWithStart(mBatchRequest); + + // Verify that the submitManualSync signal was passed to the batch request by capturing the + // argument, + // executing the signal, and verifying that the desired signal was executed + final ArgumentCaptor batchRequestAddArgCaptor = ArgumentCaptor.forClass(Proc.class); + verify(mBatchRequest).add(batchRequestAddArgCaptor.capture()); + final Proc signal = 
batchRequestAddArgCaptor.getValue(); + signal.apply(); + verify(mNewConnectionManagerWorkflow).submitManualSync(); + } + + @Test + @SuppressWarnings("unchecked") + @DisplayName("Test startNewManualSync returns a failure reason when connection is deleted") + void testStartNewManualSyncDeletedWorkflow() { + final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + final WorkflowState mWorkflowState = mock(WorkflowState.class); + when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); + when(mWorkflowState.isDeleted()).thenReturn(true); + when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); + + final ManualOperationResult result = temporalClient.startNewManualSync(CONNECTION_ID); + + // this is only called when updating an existing workflow + assertFalse(result.getJobId().isPresent()); + assertTrue(result.getFailingReason().isPresent()); + verify(mConnectionManagerWorkflow, times(0)).submitManualSync(); + } + + } + + @Nested + @DisplayName("Test cancellation behavior") + class Cancellation { + + @Test + @DisplayName("Test startNewCancellation successful") + void testStartNewCancellationSuccess() { + final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + final WorkflowState mWorkflowState = mock(WorkflowState.class); + when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); + when(mWorkflowState.isDeleted()).thenReturn(false); + when(mWorkflowState.isRunning()).thenReturn(true).thenReturn(false); + when(mConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); + when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); + + final ManualOperationResult result = temporalClient.startNewCancellation(CONNECTION_ID); + + assertTrue(result.getJobId().isPresent()); + assertEquals(JOB_ID, result.getJobId().get()); + assertFalse(result.getFailingReason().isPresent()); + verify(mConnectionManagerWorkflow).cancelJob(); + } + + @Test + @DisplayName("Test startNewCancellation repairs the workflow if it is in a bad state") + void testStartNewCancellationRepairsBadWorkflowState() { + final ConnectionManagerWorkflow mTerminatedConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + when(mTerminatedConnectionManagerWorkflow.getState()) + .thenThrow(new IllegalStateException("Force state exception to simulate workflow not running")); + when(mTerminatedConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); + when(workflowClient.newWorkflowStub(any(Class.class), any(String.class))).thenReturn(mTerminatedConnectionManagerWorkflow); + + final ConnectionManagerWorkflow mNewConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + final WorkflowState mWorkflowState = mock(WorkflowState.class); + when(mNewConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); + when(mWorkflowState.isDeleted()).thenReturn(false); + when(mWorkflowState.isRunning()).thenReturn(true).thenReturn(false); + when(mNewConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); + when(workflowClient.newWorkflowStub(any(Class.class), any(WorkflowOptions.class))).thenReturn(mNewConnectionManagerWorkflow); + final BatchRequest mBatchRequest = mock(BatchRequest.class); + when(workflowClient.newSignalWithStartRequest()).thenReturn(mBatchRequest); + + final ManualOperationResult result = 
temporalClient.startNewCancellation(CONNECTION_ID); + + assertTrue(result.getJobId().isPresent()); + assertEquals(ConnectionManagerWorkflowImpl.NON_RUNNING_JOB_ID, result.getJobId().get()); + assertFalse(result.getFailingReason().isPresent()); + verify(workflowClient).signalWithStart(mBatchRequest); + + // Verify that the cancelJob signal was passed to the batch request by capturing the argument, + // executing the signal, and verifying that the desired signal was executed + final ArgumentCaptor batchRequestAddArgCaptor = ArgumentCaptor.forClass(Proc.class); + verify(mBatchRequest).add(batchRequestAddArgCaptor.capture()); + final Proc signal = batchRequestAddArgCaptor.getValue(); + signal.apply(); + verify(mNewConnectionManagerWorkflow).cancelJob(); + } + + @Test + @SuppressWarnings("unchecked") + @DisplayName("Test startNewCancellation returns a failure reason when connection is deleted") + void testStartNewCancellationDeletedWorkflow() { + final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + final WorkflowState mWorkflowState = mock(WorkflowState.class); + when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); + when(mWorkflowState.isDeleted()).thenReturn(true); + when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); + + final ManualOperationResult result = temporalClient.startNewCancellation(CONNECTION_ID); + + // this is only called when updating an existing workflow + assertFalse(result.getJobId().isPresent()); + assertTrue(result.getFailingReason().isPresent()); + verify(mConnectionManagerWorkflow, times(0)).cancelJob(); + } + + } + + @Nested + @DisplayName("Test reset connection behavior") + class ResetConnection { + + @Test + @DisplayName("Test resetConnection successful") + void testResetConnectionSuccess() { + final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + final WorkflowState mWorkflowState = mock(WorkflowState.class); + when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); + when(mWorkflowState.isDeleted()).thenReturn(false); + when(mWorkflowState.isRunning()).thenReturn(false); + final long jobId1 = 1; + final long jobId2 = 2; + when(mConnectionManagerWorkflow.getJobInformation()).thenReturn( + new JobInformation(jobId1, 0), + new JobInformation(jobId1, 0), + new JobInformation(jobId2, 0), + new JobInformation(jobId2, 0)); + when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); + + final ManualOperationResult result = temporalClient.resetConnection(CONNECTION_ID); + + assertTrue(result.getJobId().isPresent()); + assertEquals(jobId2, result.getJobId().get()); + assertFalse(result.getFailingReason().isPresent()); + verify(mConnectionManagerWorkflow).resetConnection(); + } + + @Test + @DisplayName("Test resetConnection repairs the workflow if it is in a bad state") + void testResetConnectionRepairsBadWorkflowState() { + final ConnectionManagerWorkflow mTerminatedConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + when(mTerminatedConnectionManagerWorkflow.getState()) + .thenThrow(new IllegalStateException("Force state exception to simulate workflow not running")); + when(mTerminatedConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); + when(workflowClient.newWorkflowStub(any(Class.class), any(String.class))).thenReturn(mTerminatedConnectionManagerWorkflow); + + final ConnectionManagerWorkflow 
mNewConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + final WorkflowState mWorkflowState = mock(WorkflowState.class); + when(mNewConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); + when(mWorkflowState.isDeleted()).thenReturn(false); + when(mWorkflowState.isRunning()).thenReturn(false); + when(mNewConnectionManagerWorkflow.getJobInformation()).thenReturn( + new JobInformation(ConnectionManagerWorkflowImpl.NON_RUNNING_JOB_ID, 0), + new JobInformation(ConnectionManagerWorkflowImpl.NON_RUNNING_JOB_ID, 0), + new JobInformation(JOB_ID, 0), + new JobInformation(JOB_ID, 0)); + when(workflowClient.newWorkflowStub(any(Class.class), any(WorkflowOptions.class))).thenReturn(mNewConnectionManagerWorkflow); + final BatchRequest mBatchRequest = mock(BatchRequest.class); + when(workflowClient.newSignalWithStartRequest()).thenReturn(mBatchRequest); + + final ManualOperationResult result = temporalClient.resetConnection(CONNECTION_ID); + + assertTrue(result.getJobId().isPresent()); + assertEquals(JOB_ID, result.getJobId().get()); + assertFalse(result.getFailingReason().isPresent()); + verify(workflowClient).signalWithStart(mBatchRequest); + + // Verify that the resetConnection signal was passed to the batch request by capturing the argument, + // executing the signal, and verifying that the desired signal was executed + final ArgumentCaptor batchRequestAddArgCaptor = ArgumentCaptor.forClass(Proc.class); + verify(mBatchRequest).add(batchRequestAddArgCaptor.capture()); + final Proc signal = batchRequestAddArgCaptor.getValue(); + signal.apply(); + verify(mNewConnectionManagerWorkflow).resetConnection(); + } + + @Test + @SuppressWarnings("unchecked") + @DisplayName("Test resetConnection returns a failure reason when connection is deleted") + void testResetConnectionDeletedWorkflow() { + final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); + final WorkflowState mWorkflowState = mock(WorkflowState.class); + when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); + when(mWorkflowState.isDeleted()).thenReturn(true); + when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); + + final ManualOperationResult result = temporalClient.resetConnection(CONNECTION_ID); + + // this is only called when updating an existing workflow + assertFalse(result.getJobId().isPresent()); + assertTrue(result.getFailingReason().isPresent()); + verify(mConnectionManagerWorkflow, times(0)).resetConnection(); + } + } } diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java index 9aadf0279269b..85a661917fc49 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java @@ -413,6 +413,28 @@ public void deleteSync() throws InterruptedException { Mockito.verify(mConnectionDeletionActivity, Mockito.times(1)).deleteConnection(Mockito.any()); } + @RepeatedTest(10) + @Timeout(value = 2, + unit = TimeUnit.SECONDS) + @DisplayName("Test that fresh workflow cleans the job state") + public void testStartFromCleanJobState() throws InterruptedException { + final ConnectionUpdaterInput input = ConnectionUpdaterInput.builder() + .connectionId(UUID.randomUUID()) + .jobId(null) + .attemptId(null) + 
.fromFailure(false) + .attemptNumber(1) + .workflowState(null) + .resetConnection(false) + .fromJobResetFailure(false) + .build(); + + startWorkflowAndWaitUntilReady(workflow, input); + testEnv.sleep(Duration.ofSeconds(30L)); + + Mockito.verify(mJobCreationAndStatusUpdateActivity, Mockito.times(1)).ensureCleanJobState(Mockito.any()); + } + } @Nested diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java index 5448541f87cd4..c1379d618a4cf 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java @@ -4,10 +4,15 @@ package io.airbyte.workers.temporal.scheduling.activities; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; + import io.airbyte.config.AttemptFailureSummary; import io.airbyte.config.Configs.WorkerEnvironment; import io.airbyte.config.FailureReason; import io.airbyte.config.FailureReason.FailureOrigin; +import io.airbyte.config.JobConfig; +import io.airbyte.config.JobConfig.ConfigType; import io.airbyte.config.JobOutput; import io.airbyte.config.NormalizationSummary; import io.airbyte.config.StandardSync; @@ -18,7 +23,10 @@ import io.airbyte.config.helpers.LogConfigs; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.scheduler.models.Attempt; +import io.airbyte.scheduler.models.AttemptStatus; import io.airbyte.scheduler.models.Job; +import io.airbyte.scheduler.models.JobStatus; import io.airbyte.scheduler.persistence.JobNotifier; import io.airbyte.scheduler.persistence.JobPersistence; import io.airbyte.scheduler.persistence.job_factory.SyncJobFactory; @@ -30,6 +38,7 @@ import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationOutput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptFailureInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberCreationOutput; +import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.EnsureCleanJobStateInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationOutput; @@ -40,6 +49,7 @@ import java.io.IOException; import java.nio.file.Path; import java.util.Collections; +import java.util.List; import java.util.UUID; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.DisplayName; @@ -224,7 +234,7 @@ public void setJobSuccess() throws IOException { Mockito.verify(mJobPersistence).writeOutput(JOB_ID, ATTEMPT_ID, jobOutput); Mockito.verify(mJobPersistence).succeedAttempt(JOB_ID, ATTEMPT_ID); Mockito.verify(mJobNotifier).successJob(Mockito.any()); - Mockito.verify(mJobtracker).trackSync(Mockito.any(), Mockito.eq(JobState.SUCCEEDED)); + Mockito.verify(mJobtracker).trackSync(Mockito.any(), eq(JobState.SUCCEEDED)); } @Test @@ -242,7 +252,7 @@ 
public void setJobFailure() throws IOException { jobCreationAndStatusUpdateActivity.jobFailure(new JobFailureInput(JOB_ID, "reason")); Mockito.verify(mJobPersistence).failJob(JOB_ID); - Mockito.verify(mJobNotifier).failJob(Mockito.eq("reason"), Mockito.any()); + Mockito.verify(mJobNotifier).failJob(eq("reason"), Mockito.any()); } @Test @@ -295,6 +305,36 @@ public void setJobCancelledWrapException() throws IOException { .hasCauseInstanceOf(IOException.class); } + @Test + public void ensureCleanJobState() throws IOException { + final Attempt failedAttempt = new Attempt(0, 1, Path.of(""), null, AttemptStatus.FAILED, null, 2L, 3L, 3L); + final int runningAttemptNumber = 1; + final Attempt runningAttempt = new Attempt(runningAttemptNumber, 1, Path.of(""), null, AttemptStatus.RUNNING, null, 4L, 5L, null); + final Job runningJob = new Job(1, ConfigType.SYNC, CONNECTION_ID.toString(), new JobConfig(), List.of(failedAttempt, runningAttempt), + JobStatus.RUNNING, 2L, 2L, 3L); + + final Job pendingJob = new Job(2, ConfigType.SYNC, CONNECTION_ID.toString(), new JobConfig(), List.of(), JobStatus.PENDING, 4L, 4L, 5L); + + Mockito.when(mJobPersistence.listJobsForConnectionWithStatuses(CONNECTION_ID, Job.REPLICATION_TYPES, JobStatus.NON_TERMINAL_STATUSES)) + .thenReturn(List.of(runningJob, pendingJob)); + Mockito.when(mJobPersistence.getJob(runningJob.getId())).thenReturn(runningJob); + Mockito.when(mJobPersistence.getJob(pendingJob.getId())).thenReturn(pendingJob); + + jobCreationAndStatusUpdateActivity.ensureCleanJobState(new EnsureCleanJobStateInput(CONNECTION_ID)); + + Mockito.verify(mJobPersistence).failJob(runningJob.getId()); + Mockito.verify(mJobPersistence).failJob(pendingJob.getId()); + Mockito.verify(mJobPersistence).failAttempt(runningJob.getId(), runningAttemptNumber); + Mockito.verify(mJobPersistence).writeAttemptFailureSummary(eq(runningJob.getId()), eq(runningAttemptNumber), any()); + Mockito.verify(mJobPersistence).getJob(runningJob.getId()); + Mockito.verify(mJobPersistence).getJob(pendingJob.getId()); + Mockito.verify(mJobNotifier).failJob(any(), eq(runningJob)); + Mockito.verify(mJobNotifier).failJob(any(), eq(pendingJob)); + Mockito.verify(mJobtracker).trackSync(runningJob, JobState.FAILED); + Mockito.verify(mJobtracker).trackSync(pendingJob, JobState.FAILED); + Mockito.verifyNoMoreInteractions(mJobPersistence, mJobNotifier, mJobtracker); + } + } } From 04eb3b402ee37a648fba7b8da7cbc78518f91df4 Mon Sep 17 00:00:00 2001 From: Davin Chia Date: Fri, 13 May 2022 09:47:11 +0800 Subject: [PATCH 52/55] Run connector build when airbyte protocol is updated. (#12831) --- .github/workflows/gradle.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml index 034e8cf0a899a..ceb91fe7a2fa8 100644 --- a/.github/workflows/gradle.yml +++ b/.github/workflows/gradle.yml @@ -57,6 +57,7 @@ jobs: - 'octavia-cli/**' connectors: - 'airbyte-cdk/**' + - 'airbyte-protocol/**' - 'airbyte-integrations/**' db: - 'airbyte-db/**' From b8703ea6a113bc3c4fd4fe7d309802ca8632673f Mon Sep 17 00:00:00 2001 From: "Pedro S. 
Lopez" Date: Thu, 12 May 2022 22:38:30 -0400 Subject: [PATCH 53/55] update CDK airbyte protocol models to fix master build (#12829) --- .../airbyte_cdk/models/airbyte_protocol.py | 31 ++++++++++++++++--- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol.py b/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol.py index e309b7f00d8d7..5e904cf70c4e0 100644 --- a/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol.py +++ b/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol.py @@ -36,11 +36,16 @@ class Config: namespace: Optional[str] = Field(None, description="the namespace of this record's stream") -class AirbyteStateMessage(BaseModel): - class Config: - extra = Extra.allow +class AirbyteStateType(Enum): + GLOBAL = "GLOBAL" + PER_STREAM = "PER_STREAM" + - data: Dict[str, Any] = Field(..., description="the state data") +class AirbyteStateBlob(BaseModel): + pass + + class Config: + extra = Extra.forbid class Level(Enum): @@ -157,6 +162,14 @@ class OAuthConfigSpecification(BaseModel): ) +class AirbyteStreamState(BaseModel): + class Config: + extra = Extra.forbid + + name: Optional[str] = Field(None, description="Stream name") + state: Optional[AirbyteStateBlob] = None + + class AirbyteTraceMessage(BaseModel): class Config: extra = Extra.allow @@ -244,6 +257,16 @@ class Config: ) +class AirbyteStateMessage(BaseModel): + class Config: + extra = Extra.allow + + state_type: Optional[AirbyteStateType] = None + data: Optional[Dict[str, Any]] = Field(None, description="(Deprecated) the state data") + global_: Optional[AirbyteStateBlob] = Field(None, alias="global") + streams: Optional[List[AirbyteStreamState]] = None + + class AirbyteCatalog(BaseModel): class Config: extra = Extra.allow From 7d16fdafa1180b2ffab7a3faea439a1b8ebd7050 Mon Sep 17 00:00:00 2001 From: LiRen Tu Date: Fri, 13 May 2022 01:35:00 -0700 Subject: [PATCH 54/55] =?UTF-8?q?=F0=9F=90=9E=20Postgres=20source:=20fix?= =?UTF-8?q?=20azure=20database=20catalog=20retrieval=20(#12834)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Remove host in azure username * Bump version * Fix test * Replace host with jdbc_url * auto-bump connector version Co-authored-by: Octavia Squidington III --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 2 +- .../source-postgres-strict-encrypt/Dockerfile | 2 +- .../connectors/source-postgres/Dockerfile | 2 +- .../source/postgres/PostgresSource.java | 20 ++++++++++++++++++- .../source/postgres/PostgresSourceTest.java | 18 +++++++++++++++++ docs/integrations/sources/postgres.md | 3 ++- 7 files changed, 43 insertions(+), 6 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 23876bf2f510d..e77998c6131a7 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -715,7 +715,7 @@ - name: Postgres sourceDefinitionId: decd338e-5647-4c0b-adf4-da0e75f5a750 dockerRepository: airbyte/source-postgres - dockerImageTag: 0.4.14 + dockerImageTag: 0.4.15 documentationUrl: https://docs.airbyte.io/integrations/sources/postgres icon: postgresql.svg sourceType: database diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 4f16ad96510bb..4d0669ed6655a 100644 --- 
a/airbyte-config/init/src/main/resources/seed/source_specs.yaml
+++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml
@@ -6540,7 +6540,7 @@
   supportsNormalization: false
   supportsDBT: false
   supported_destination_sync_modes: []
-- dockerImage: "airbyte/source-postgres:0.4.14"
+- dockerImage: "airbyte/source-postgres:0.4.15"
   spec:
     documentationUrl: "https://docs.airbyte.com/integrations/sources/postgres"
     connectionSpecification:
diff --git a/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile
index b72ad562fbb9f..a0250be06dc3f 100644
--- a/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile
+++ b/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile
@@ -16,5 +16,5 @@ ENV APPLICATION source-postgres-strict-encrypt
 COPY --from=build /airbyte /airbyte
-LABEL io.airbyte.version=0.4.14
+LABEL io.airbyte.version=0.4.15
 LABEL io.airbyte.name=airbyte/source-postgres-strict-encrypt
diff --git a/airbyte-integrations/connectors/source-postgres/Dockerfile b/airbyte-integrations/connectors/source-postgres/Dockerfile
index cc4f371737a20..b527cc286d226 100644
--- a/airbyte-integrations/connectors/source-postgres/Dockerfile
+++ b/airbyte-integrations/connectors/source-postgres/Dockerfile
@@ -16,5 +16,5 @@ ENV APPLICATION source-postgres
 COPY --from=build /airbyte /airbyte
-LABEL io.airbyte.version=0.4.14
+LABEL io.airbyte.version=0.4.15
 LABEL io.airbyte.name=airbyte/source-postgres
diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java
index 0a8ee9ff0b475..bba6515a244c2 100644
--- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java
+++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java
@@ -332,7 +332,7 @@ public Set getPrivilegesTableForCurrentUser(final JdbcDatabase
             WHERE t.grpid = m.oid AND t.userid = r.oid)
           AND privilege_type = 'SELECT';
         """);
-      final String username = database.getDatabaseConfig().get("username").asText();
+      final String username = getUsername(database.getDatabaseConfig());
       ps.setString(1, username);
       ps.setString(2, username);
       ps.setString(3, username);
@@ -349,6 +349,24 @@ public Set getPrivilegesTableForCurrentUser(final JdbcDatabase
         .collect(toSet());
   }
 
+  @VisibleForTesting
+  static String getUsername(final JsonNode databaseConfig) {
+    final String jdbcUrl = databaseConfig.get("jdbc_url").asText();
+    final String username = databaseConfig.get("username").asText();
+
+    // Azure Postgres server has this username pattern: <username>@<host>.
+    // Inside Postgres, the true username is just <username>.
+    // The jdbc_url is constructed in the toDatabaseConfigStatic method.
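+    // Illustrative example (hypothetical values): a configured username of
+    // "airbyte-user@my-azure-host" paired with an azure.com jdbc_url resolves to the
+    // plain in-database username "airbyte-user".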
+ if (username.contains("@") && jdbcUrl.contains("azure.com:")) { + final String[] tokens = username.split("@"); + final String postgresUsername = tokens[0]; + LOGGER.info("Azure username \"{}\" is detected; use \"{}\" to check permission", username, postgresUsername); + return postgresUsername; + } + + return username; + } + @Override protected boolean isNotInternalSchema(final JsonNode jsonNode, final Set internalSchemas) { return false; diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceTest.java index 8cf536879edab..c8cd44c8adbad 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceTest.java @@ -34,6 +34,7 @@ import io.airbyte.test.utils.PostgreSQLContainerHelper; import java.math.BigDecimal; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; @@ -359,4 +360,21 @@ void testIsCdc() { assertTrue(PostgresSource.isCdc(config)); } + @Test + void testGetUsername() { + final String username = "airbyte-user"; + + // normal host + final JsonNode normalConfig = Jsons.jsonNode(Map.of( + "username", username, + "jdbc_url", "jdbc:postgresql://airbyte.database.com:5432:airbyte")); + assertEquals(username, PostgresSource.getUsername(normalConfig)); + + // azure host + final JsonNode azureConfig = Jsons.jsonNode(Map.of( + "username", username + "@airbyte", + "jdbc_url", "jdbc:postgresql://airbyte.azure.com:5432:airbyte")); + assertEquals(username, PostgresSource.getUsername(azureConfig)); + } + } diff --git a/docs/integrations/sources/postgres.md b/docs/integrations/sources/postgres.md index d632904fdbaa1..ae92f27ab7d81 100644 --- a/docs/integrations/sources/postgres.md +++ b/docs/integrations/sources/postgres.md @@ -270,6 +270,7 @@ According to Postgres [documentation](https://www.postgresql.org/docs/14/datatyp | Version | Date | Pull Request | Subject | |:--------|:-----------|:-------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------| +| 0.4.15 | 2022-05-13 | [12834](https://github.com/airbytehq/airbyte/pull/12834) | Fix the bug that the connector returns empty catalog for Azure Postgres database | | 0.4.14 | 2022-05-08 | [12689](https://github.com/airbytehq/airbyte/pull/12689) | Add table retrieval according to role-based `SELECT` privilege | | 0.4.13 | 2022-05-05 | [10230](https://github.com/airbytehq/airbyte/pull/10230) | Explicitly set null value for field in json | | 0.4.12 | 2022-04-29 | [12480](https://github.com/airbytehq/airbyte/pull/12480) | Query tables with adaptive fetch size to optimize JDBC memory consumption | @@ -278,7 +279,7 @@ According to Postgres [documentation](https://www.postgresql.org/docs/14/datatyp | 0.4.8 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. 
Now connectors work with both formats |
| 0.4.7 | 2022-02-18 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Updated timestamp transformation with microseconds |
| 0.4.6 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | (unpublished) Add `-XX:+ExitOnOutOfMemoryError` JVM option |
-| 0.4.5 | 2022-02-08 | [10173](https://github.com/airbytehq/airbyte/pull/10173) | Improved discovering tables in case if user does not have permissions to any table |
+| 0.4.5 | 2022-02-08 | [10173](https://github.com/airbytehq/airbyte/pull/10173) | Improved discovering tables in case if user does not have permissions to any table |
| 0.4.4 | 2022-01-26 | [9807](https://github.com/airbytehq/airbyte/pull/9807) | Update connector fields title/description |
| 0.4.3 | 2022-01-24 | [9554](https://github.com/airbytehq/airbyte/pull/9554) | Allow handling of java sql date in CDC |
| 0.4.2 | 2022-01-13 | [9360](https://github.com/airbytehq/airbyte/pull/9360) | Added schema selection |

From 0bb3e3b6a3f6f67c726b4b5608029ff2f20af195 Mon Sep 17 00:00:00 2001
From: Octavia Squidington III <90398440+octavia-squidington-iii@users.noreply.github.com>
Date: Fri, 13 May 2022 09:35:56 -0300
Subject: [PATCH 55/55] Bump Airbyte version from 0.38.2-alpha to 0.38.3-alpha (#12839)

Co-authored-by: timroes
---
 .bumpversion.cfg                                   |  2 +-
 .env                                               |  2 +-
 airbyte-bootloader/Dockerfile                      |  2 +-
 airbyte-container-orchestrator/Dockerfile          |  2 +-
 airbyte-metrics/reporter/Dockerfile                |  2 +-
 airbyte-scheduler/app/Dockerfile                   |  2 +-
 airbyte-server/Dockerfile                          |  2 +-
 airbyte-webapp/package-lock.json                   |  4 ++--
 airbyte-webapp/package.json                        |  2 +-
 airbyte-workers/Dockerfile                         |  2 +-
 charts/airbyte/Chart.yaml                          |  2 +-
 charts/airbyte/README.md                           | 10 +++++-----
 charts/airbyte/values.yaml                         | 10 +++++-----
 docs/operator-guides/upgrading-airbyte.md          |  2 +-
 kube/overlays/stable-with-resource-limits/.env     |  2 +-
 .../stable-with-resource-limits/kustomization.yaml | 12 ++++++------
 kube/overlays/stable/.env                          |  2 +-
 kube/overlays/stable/kustomization.yaml            | 12 ++++++------
 octavia-cli/Dockerfile                             |  2 +-
 octavia-cli/README.md                              |  2 +-
 octavia-cli/install.sh                             |  2 +-
 octavia-cli/setup.py                               |  2 +-
 22 files changed, 41 insertions(+), 41 deletions(-)

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 47ae7b955ee14..d2db28752b753 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.38.2-alpha
+current_version = 0.38.3-alpha
 commit = False
 tag = False
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-[a-z]+)?
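As a side note on the `parse` setting above: the named groups are what bumpversion uses to decompose a version string into its components. The following minimal Java sketch is illustrative only and is not part of the patch; Java's `(?<name>…)` named-group syntax stands in for Python's `(?P<name>…)`, and the class name is hypothetical. It shows how the expression matches a version like `0.38.3-alpha`:

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class VersionParseSketch {

  public static void main(final String[] args) {
    // Same structure as the bumpversion `parse` setting, translated to Java regex syntax.
    final Pattern version = Pattern.compile("(?<major>\\d+)\\.(?<minor>\\d+)\\.(?<patch>\\d+)(-[a-z]+)?");
    final Matcher matcher = version.matcher("0.38.3-alpha");
    if (matcher.matches()) {
      // Prints: major=0, minor=38, patch=3 (the "-alpha" suffix is matched by the optional group)
      System.out.printf("major=%s, minor=%s, patch=%s%n",
          matcher.group("major"), matcher.group("minor"), matcher.group("patch"));
    }
  }

}
```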
diff --git a/.env b/.env
index a24e7001a6d25..71e372bd6ac4b 100644
--- a/.env
+++ b/.env
@@ -10,7 +10,7 @@
 
 
 ### SHARED ###
-VERSION=0.38.2-alpha
+VERSION=0.38.3-alpha
 
 # When using the airbyte-db via default docker image
 CONFIG_ROOT=/data
diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile
index e7c0172aac459..ddfb208b2f89e 100644
--- a/airbyte-bootloader/Dockerfile
+++ b/airbyte-bootloader/Dockerfile
@@ -2,7 +2,7 @@ ARG JDK_VERSION=17.0.1
 ARG JDK_IMAGE=openjdk:${JDK_VERSION}-slim
 FROM ${JDK_IMAGE}
 
-ARG VERSION=0.38.2-alpha
+ARG VERSION=0.38.3-alpha
 
 ENV APPLICATION airbyte-bootloader
 ENV VERSION ${VERSION}
diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile
index 2c64bf90ffc17..2b49de83ac62e 100644
--- a/airbyte-container-orchestrator/Dockerfile
+++ b/airbyte-container-orchestrator/Dockerfile
@@ -28,7 +28,7 @@ RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] htt
 RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y kubectl
 
 # Don't change this manually. Bump version expects to make moves based on this string
-ARG VERSION=0.38.2-alpha
+ARG VERSION=0.38.3-alpha
 
 ENV APPLICATION airbyte-container-orchestrator
 ENV VERSION=${VERSION}
diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile
index a1f99b76d276b..5e60bee9074aa 100644
--- a/airbyte-metrics/reporter/Dockerfile
+++ b/airbyte-metrics/reporter/Dockerfile
@@ -2,7 +2,7 @@ ARG JDK_VERSION=17.0.1
 ARG JDK_IMAGE=openjdk:${JDK_VERSION}-slim
 FROM ${JDK_IMAGE} AS metrics-reporter
 
-ARG VERSION=0.38.2-alpha
+ARG VERSION=0.38.3-alpha
 
 ENV APPLICATION airbyte-metrics-reporter
 ENV VERSION ${VERSION}
diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile
index c37cf2f8706b2..44556a41349aa 100644
--- a/airbyte-scheduler/app/Dockerfile
+++ b/airbyte-scheduler/app/Dockerfile
@@ -2,7 +2,7 @@ ARG JDK_VERSION=17.0.1
 ARG JDK_IMAGE=openjdk:${JDK_VERSION}-slim
 FROM ${JDK_IMAGE} AS scheduler
 
-ARG VERSION=0.38.2-alpha
+ARG VERSION=0.38.3-alpha
 
 ENV APPLICATION airbyte-scheduler
 ENV VERSION ${VERSION}
diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile
index 0dffbd46aea22..1a4ba3c70a6df 100644
--- a/airbyte-server/Dockerfile
+++ b/airbyte-server/Dockerfile
@@ -4,7 +4,7 @@ FROM ${JDK_IMAGE} AS server
 
 EXPOSE 8000
 
-ARG VERSION=0.38.2-alpha
+ARG VERSION=0.38.3-alpha
 
 ENV APPLICATION airbyte-server
 ENV VERSION ${VERSION}
diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json
index 3bd4a4ffcfb57..fa55a633f90c5 100644
--- a/airbyte-webapp/package-lock.json
+++ b/airbyte-webapp/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "airbyte-webapp",
-  "version": "0.38.2-alpha",
+  "version": "0.38.3-alpha",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "airbyte-webapp",
-      "version": "0.38.2-alpha",
+      "version": "0.38.3-alpha",
       "dependencies": {
         "@fortawesome/fontawesome-svg-core": "^6.1.1",
         "@fortawesome/free-brands-svg-icons": "^6.1.1",
diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json
index d87295dc6ec73..5eb54d1bacce3 100644
--- a/airbyte-webapp/package.json
+++ b/airbyte-webapp/package.json
@@ -1,6 +1,6 @@
 {
   "name": "airbyte-webapp",
-  "version": "0.38.2-alpha",
+  "version": "0.38.3-alpha",
   "private": true,
   "engines": {
     "node": ">=16.0.0"
diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile
index 8ce42ab96c2ca..014fe836c7009 100644
--- a/airbyte-workers/Dockerfile
+++ b/airbyte-workers/Dockerfile
@@ -27,7 +27,7 @@ RUN curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packa
 RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list
 RUN apt-get update && apt-get install -y kubectl
 
-ARG VERSION=0.38.2-alpha
+ARG VERSION=0.38.3-alpha
 
 ENV APPLICATION airbyte-workers
 ENV VERSION ${VERSION}
diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml
index 7ef9db404ad47..cfc84e6e767ed 100644
--- a/charts/airbyte/Chart.yaml
+++ b/charts/airbyte/Chart.yaml
@@ -21,7 +21,7 @@ version: 0.3.2
 # incremented each time you make changes to the application. Versions are not expected to
 # follow Semantic Versioning. They should reflect the version the application is using.
 # It is recommended to use it with quotes.
-appVersion: "0.38.2-alpha"
+appVersion: "0.38.3-alpha"
 
 dependencies:
 - name: common
diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md
index 284c5b85e9452..f91e3d80ab7d9 100644
--- a/charts/airbyte/README.md
+++ b/charts/airbyte/README.md
@@ -30,7 +30,7 @@ Helm charts for Airbyte.
 | `webapp.replicaCount` | Number of webapp replicas | `1` |
 | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` |
 | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` |
-| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.38.2-alpha` |
+| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.38.3-alpha` |
 | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` |
 | `webapp.containerSecurityContext` | Security context for the container | `{}` |
 | `webapp.livenessProbe.enabled` | Enable livenessProbe on the webapp | `true` |
@@ -72,7 +72,7 @@ Helm charts for Airbyte.
 | `scheduler.replicaCount` | Number of scheduler replicas | `1` |
 | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` |
 | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` |
-| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.38.2-alpha` |
+| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.38.3-alpha` |
 | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` |
 | `scheduler.containerSecurityContext` | Security context for the container | `{}` |
 | `scheduler.livenessProbe.enabled` | Enable livenessProbe on the scheduler | `true` |
@@ -135,7 +135,7 @@ Helm charts for Airbyte.
 | `server.replicaCount` | Number of server replicas | `1` |
 | `server.image.repository` | The repository to use for the airbyte server image. | `airbyte/server` |
 | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` |
-| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.38.2-alpha` |
+| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.38.3-alpha` |
 | `server.podAnnotations` | Add extra annotations to the server pod | `{}` |
 | `server.containerSecurityContext` | Security context for the container | `{}` |
 | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` |
@@ -170,7 +170,7 @@
 | `worker.replicaCount` | Number of worker replicas | `1` |
 | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` |
 | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` |
-| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.38.2-alpha` |
+| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.38.3-alpha` |
 | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` |
 | `worker.containerSecurityContext` | Security context for the container | `{}` |
 | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` |
@@ -202,7 +202,7 @@ Helm charts for Airbyte.
 | ----------------------------- | -------------------------------------------------------------------- | -------------------- |
 | `bootloader.image.repository` | The repository to use for the airbyte bootloader image. | `airbyte/bootloader` |
 | `bootloader.image.pullPolicy` | the pull policy to use for the airbyte bootloader image | `IfNotPresent` |
-| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.38.2-alpha` |
+| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.38.3-alpha` |
 | `bootloader.podAnnotations` | Add extra annotations to the bootloader pod | `{}` |
 | `bootloader.nodeSelector` | Node labels for pod assignment | `{}` |
 | `bootloader.tolerations` | Tolerations for worker pod assignment. | `[]` |
diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml
index 4f65af52cc6b9..122662954ee02 100644
--- a/charts/airbyte/values.yaml
+++ b/charts/airbyte/values.yaml
@@ -41,7 +41,7 @@ webapp:
   image:
     repository: airbyte/webapp
     pullPolicy: IfNotPresent
-    tag: 0.38.2-alpha
+    tag: 0.38.3-alpha
 
   ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s)
   ##
@@ -207,7 +207,7 @@ scheduler:
   image:
     repository: airbyte/scheduler
     pullPolicy: IfNotPresent
-    tag: 0.38.2-alpha
+    tag: 0.38.3-alpha
 
   ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod
   ##
@@ -438,7 +438,7 @@ server:
   image:
     repository: airbyte/server
     pullPolicy: IfNotPresent
-    tag: 0.38.2-alpha
+    tag: 0.38.3-alpha
 
   ## @param server.podAnnotations [object] Add extra annotations to the server pod
   ##
@@ -565,7 +565,7 @@ worker:
   image:
     repository: airbyte/worker
     pullPolicy: IfNotPresent
-    tag: 0.38.2-alpha
+    tag: 0.38.3-alpha
 
   ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s)
   ##
@@ -683,7 +683,7 @@ bootloader:
   image:
     repository: airbyte/bootloader
     pullPolicy: IfNotPresent
-    tag: 0.38.2-alpha
+    tag: 0.38.3-alpha
 
   ## @param bootloader.podAnnotations [object] Add extra annotations to the bootloader pod
   ##
diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md
index bd9ed4ce925d9..982f8594ce0ae 100644
--- a/docs/operator-guides/upgrading-airbyte.md
+++ b/docs/operator-guides/upgrading-airbyte.md
@@ -103,7 +103,7 @@ If you are upgrading from (i.e. your current version of Airbyte is) Airbyte vers
    Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`.
 
    ```bash
-   docker run --rm -v /tmp:/config airbyte/migration:0.38.2-alpha --\
+   docker run --rm -v /tmp:/config airbyte/migration:0.38.3-alpha --\
    --input /config/airbyte_archive.tar.gz\
    --output /config/airbyte_archive_migrated.tar.gz
    ```
diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env
index a706a1276a946..da72761187a38 100644
--- a/kube/overlays/stable-with-resource-limits/.env
+++ b/kube/overlays/stable-with-resource-limits/.env
@@ -1,4 +1,4 @@
-AIRBYTE_VERSION=0.38.2-alpha
+AIRBYTE_VERSION=0.38.3-alpha
 
 # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db
 DATABASE_HOST=airbyte-db-svc
diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml
index 23045a8ddfc91..3fd8a6cec6a97 100644
--- a/kube/overlays/stable-with-resource-limits/kustomization.yaml
+++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml
@@ -8,17 +8,17 @@ bases:
 
 images:
   - name: airbyte/db
-    newTag: 0.38.2-alpha
+    newTag: 0.38.3-alpha
   - name: airbyte/bootloader
-    newTag: 0.38.2-alpha
+    newTag: 0.38.3-alpha
   - name: airbyte/scheduler
-    newTag: 0.38.2-alpha
+    newTag: 0.38.3-alpha
   - name: airbyte/server
-    newTag: 0.38.2-alpha
+    newTag: 0.38.3-alpha
   - name: airbyte/webapp
-    newTag: 0.38.2-alpha
+    newTag: 0.38.3-alpha
   - name: airbyte/worker
-    newTag: 0.38.2-alpha
+    newTag: 0.38.3-alpha
   - name: temporalio/auto-setup
     newTag: 1.7.0
 
diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env
index 55aea55ddafd7..5bdcf89e8c63d 100644
--- a/kube/overlays/stable/.env
+++ b/kube/overlays/stable/.env
@@ -1,4 +1,4 @@
-AIRBYTE_VERSION=0.38.2-alpha
+AIRBYTE_VERSION=0.38.3-alpha
 
 # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db
 DATABASE_HOST=airbyte-db-svc
diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml
index ac105343e6c99..7c827c944ec31 100644
--- a/kube/overlays/stable/kustomization.yaml
+++ b/kube/overlays/stable/kustomization.yaml
@@ -8,17 +8,17 @@ bases:
 
 images:
   - name: airbyte/db
-    newTag: 0.38.2-alpha
+    newTag: 0.38.3-alpha
   - name: airbyte/bootloader
-    newTag: 0.38.2-alpha
+    newTag: 0.38.3-alpha
   - name: airbyte/scheduler
-    newTag: 0.38.2-alpha
+    newTag: 0.38.3-alpha
   - name: airbyte/server
-    newTag: 0.38.2-alpha
+    newTag: 0.38.3-alpha
   - name: airbyte/webapp
-    newTag: 0.38.2-alpha
+    newTag: 0.38.3-alpha
   - name: airbyte/worker
-    newTag: 0.38.2-alpha
+    newTag: 0.38.3-alpha
   - name: temporalio/auto-setup
     newTag: 1.7.0
 
diff --git a/octavia-cli/Dockerfile b/octavia-cli/Dockerfile
index 82139b526e988..120e3e8303614 100644
--- a/octavia-cli/Dockerfile
+++ b/octavia-cli/Dockerfile
@@ -14,5 +14,5 @@ USER octavia-cli
 WORKDIR /home/octavia-project
 ENTRYPOINT ["octavia"]
 
-LABEL io.airbyte.version=0.38.2-alpha
+LABEL io.airbyte.version=0.38.3-alpha
 LABEL io.airbyte.name=airbyte/octavia-cli
diff --git a/octavia-cli/README.md b/octavia-cli/README.md
index 9f63b84b83702..1eecae70b988c 100644
--- a/octavia-cli/README.md
+++ b/octavia-cli/README.md
@@ -105,7 +105,7 @@ This script:
 ```bash
 touch ~/.octavia # Create a file to store env variables that will be mapped the octavia-cli container
 mkdir my_octavia_project_directory # Create your octavia project directory where YAML configurations will be stored.
-docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.38.2-alpha
+docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.38.3-alpha
 ```
 
 ### Using `docker-compose`
diff --git a/octavia-cli/install.sh b/octavia-cli/install.sh
index 130a21afdb1e4..be91808f748de 100755
--- a/octavia-cli/install.sh
+++ b/octavia-cli/install.sh
@@ -3,7 +3,7 @@
 # This install scripts currently only works for ZSH and Bash profiles.
 # It creates an octavia alias in your profile bound to a docker run command and your current user.
 
-VERSION=0.38.2-alpha
+VERSION=0.38.3-alpha
 OCTAVIA_ENV_FILE=${HOME}/.octavia
 
 detect_profile() {
diff --git a/octavia-cli/setup.py b/octavia-cli/setup.py
index 8001e37c327be..0f29187d6daf8 100644
--- a/octavia-cli/setup.py
+++ b/octavia-cli/setup.py
@@ -15,7 +15,7 @@
 
 setup(
     name="octavia-cli",
-    version="0.38.2",
+    version="0.38.3",
     description="A command line interface to manage Airbyte configurations",
     long_description=README,
     author="Airbyte",
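The patch above rewrites one pinned version string across Dockerfiles, Helm chart metadata, kustomize overlays, and npm manifests. As a rough, hypothetical sketch only (the project's real release automation is the "bump version" tooling that the `airbyte-container-orchestrator` Dockerfile comment alludes to, which is not shown here), a coordinated bump of this shape could be scripted like so:

```bash
#!/usr/bin/env bash
# Hypothetical helper, not Airbyte's actual release tooling: replace every
# occurrence of the old pinned version with the new one across the repo.
set -euo pipefail

OLD_VERSION="0.38.2-alpha"
NEW_VERSION="0.38.3-alpha"

# List files containing the old version, then substitute in place.
# sed -i.bak keeps the invocation portable between GNU and BSD sed.
grep -rl --exclude-dir=.git --exclude-dir=node_modules "${OLD_VERSION}" . |
  while IFS= read -r file; do
    sed -i.bak "s/${OLD_VERSION}/${NEW_VERSION}/g" "${file}"
    rm -f "${file}.bak"
  done
```

Note that a literal substitution like this would still miss `octavia-cli/setup.py`, which pins the bare `0.38.2` without the `-alpha` suffix; the patch bumps that file to `0.38.3` separately.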