From b16e13e9cf57ec2fc5ed48eab5595e603c9614bf Mon Sep 17 00:00:00 2001 From: VitaliiMaltsev <39538064+VitaliiMaltsev@users.noreply.github.com> Date: Wed, 27 Apr 2022 14:01:26 +0300 Subject: [PATCH 001/152] Redshift Destination: update spec (#12100) * Redshift Destination: update spec * update spec.json * update links in spec.json * added more links to spec.json | refactoring * updated docs with stadard connector template * added hyperlink to documentation for part_size field --- .../src/main/resources/spec.json | 28 ++-- docs/integrations/destinations/redshift.md | 158 ++++++++++-------- 2 files changed, 101 insertions(+), 85 deletions(-) diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json index 360372f2ca8956..243259955ddf86 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json @@ -49,22 +49,22 @@ "title": "Default Schema" }, "s3_bucket_name": { - "title": "S3 Bucket Name", + "title": "S3 Bucket Name (Optional)", "type": "string", - "description": "The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.", + "description": "The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.", "examples": ["airbyte.staging"] }, "s3_bucket_path": { - "title": "S3 Bucket Path", + "title": "S3 Bucket Path (Optional)", "type": "string", - "description": "The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory.", + "description": "The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See path's name recommendations for more details.", "examples": ["data_sync/test"] }, "s3_bucket_region": { - "title": "S3 Bucket Region", + "title": "S3 Bucket Region (Optional)", "type": "string", "default": "", - "description": "The region of the S3 staging bucket to use if utilising a copy strategy.", + "description": "The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details.", "enum": [ "", "us-east-1", @@ -94,14 +94,14 @@ }, "access_key_id": { "type": "string", - "description": "The Access Key Id granting allow one to access the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket.", - "title": "S3 Key Id", + "description": "This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key.", + "title": "S3 Key Id (Optional)", "airbyte_secret": true }, "secret_access_key": { "type": "string", - "description": "The corresponding secret to the above access key id.", - "title": "S3 Access Key", + "description": "The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key.", + "title": "S3 Access Key (Optional)", "airbyte_secret": true }, "part_size": { @@ -109,13 +109,13 @@ "minimum": 10, "maximum": 100, "examples": ["10"], - "description": "Optional. Increase this if syncing tables larger than 100GB. Only relevant for COPY. 
Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default limit of 100GB tables. Note, a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care.", - "title": "Stream Part Size" + "description": "Increase this if syncing tables larger than 100GB. Only relevant for COPY. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default limit of 100GB tables. Note: a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care. See docs for details.", + "title": "Stream Part Size (Optional)" }, "purge_staging_data": { - "title": "Purge Staging Files and Tables", + "title": "Purge Staging Files and Tables (Optional)", "type": "boolean", - "description": "Whether to delete the staging files from S3 after completing the sync. See the docs for details. Only relevant for COPY. Defaults to true.", + "description": "Whether to delete the staging files from S3 after completing the sync. See docs for details.", "default": true } } diff --git a/docs/integrations/destinations/redshift.md b/docs/integrations/destinations/redshift.md index 8339f8e7c8ecee..6dd424fdaa3db4 100644 --- a/docs/integrations/destinations/redshift.md +++ b/docs/integrations/destinations/redshift.md @@ -1,124 +1,139 @@ # Redshift -## Overview +This page guides you through the process of setting up the Redshift destination connector. + +## Prerequisites The Airbyte Redshift destination allows you to sync data to Redshift. This Redshift destination connector has two replication strategies: 1. INSERT: Replicates data via SQL INSERT queries. This is built on top of the destination-jdbc code base and is configured to rely on JDBC 4.2 standard drivers provided by Amazon via Mulesoft [here](https://mvnrepository.com/artifact/com.amazon.redshift/redshift-jdbc42) as described in Redshift documentation [here](https://docs.aws.amazon.com/redshift/latest/mgmt/jdbc20-install.html). **Not recommended for production workloads as this does not scale well**. -2. COPY: Replicates data by first uploading data to an S3 bucket and issuing a COPY command. This is the recommended loading approach described by Redshift [best practices](https://docs.aws.amazon.com/redshift/latest/dg/c_loading-data-best-practices.html). Requires an S3 bucket and credentials. - -Airbyte automatically picks an approach depending on the given configuration - if S3 configuration is present, Airbyte will use the COPY strategy and vice versa. - -We recommend users use INSERT for testing, to avoid any additional setup, and switch to COPY for production workloads. - -### Sync overview - -#### Output schema - -Each stream will be output into its own raw table in Redshift. Each table will contain 3 columns: - -* `_airbyte_ab_id`: a uuid assigned by Airbyte to each event that is processed. The column type in Redshift is `VARCHAR`. -* `_airbyte_emitted_at`: a timestamp representing when the event was pulled from the data source. The column type in Redshift is `TIMESTAMP WITH TIME ZONE`. -* `_airbyte_data`: a json blob representing with the event data. 
The column type in Redshift is `VARCHAR` but can be be parsed with JSON functions. - -#### Features -| Feature | Supported?\(Yes/No\) | Notes | -| :--- | :--- | :--- | -| Full Refresh Sync | Yes | | -| Incremental - Append Sync | Yes | | -| Incremental - Deduped History | Yes | | -| Namespaces | Yes | | -| SSL Support | Yes | | +For INSERT strategy: +* **Host** +* **Port** +* **Username** +* **Password** +* **Schema** +* **Database** + * This database needs to exist within the cluster provided. -#### Target Database +2. COPY: Replicates data by first uploading data to an S3 bucket and issuing a COPY command. This is the recommended loading approach described by Redshift [best practices](https://docs.aws.amazon.com/redshift/latest/dg/c_loading-data-best-practices.html). Requires an S3 bucket and credentials. -You will need to choose an existing database or create a new database that will be used to store synced data from Airbyte. +Airbyte automatically picks an approach depending on the given configuration - if S3 configuration is present, Airbyte will use the COPY strategy and vice versa. -## Getting started +For COPY strategy: -### Requirements +* **S3 Bucket Name** + * See [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) to create an S3 bucket. +* **S3 Bucket Region** + * Place the S3 bucket and the Redshift cluster in the same region to save on networking costs. +* **Access Key Id** + * See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) on how to generate an access key. + * We recommend creating an Airbyte-specific user. This user will require [read and write permissions](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_examples_s3_rw-bucket.html) to objects in the staging bucket. +* **Secret Access Key** + * Corresponding key to the above key id. +* **Part Size** + * Affects the size limit of an individual Redshift table. Optional. Increase this if syncing tables larger than 100GB. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default table limit of 100GB. Note, a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care. -1. Active Redshift cluster -2. Allow connections from Airbyte to your Redshift cluster \(if they exist in separate VPCs\) -3. A staging S3 bucket with credentials \(for the COPY strategy\). +Optional parameters: +* **Bucket Path** + * The directory within the S3 bucket to place the staging data. For example, if you set this to `yourFavoriteSubdirectory`, we will place the staging data inside `s3://yourBucket/yourFavoriteSubdirectory`. If not provided, defaults to the root directory. +* **Purge Staging Data** + * Whether to delete the staging files from S3 after completing the sync. Specifically, the connector will create CSV files named `bucketPath/namespace/streamName/syncDate_epochMillis_randomUuid.csv` containing three columns (`ab_id`, `data`, `emitted_at`). Normally these files are deleted after the `COPY` command completes; if you want to keep them for other purposes, set `purge_staging_data` to `false`. 
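+
+For illustration, a minimal sketch of the kind of `COPY` statement the connector issues against such a staging file (the schema, stream, file name, and credential values below are made-up placeholders, not the output of a real sync):
+
+```sql
+-- Load one staging CSV into the raw table for a hypothetical stream named "users".
+COPY airbyte_schema._airbyte_raw_users
+FROM 's3://airbyte.staging/data_sync/test/users/2022-04-27_1651053600000_0e1d9b2f.csv'
+ACCESS_KEY_ID '<your-access-key-id>'
+SECRET_ACCESS_KEY '<your-secret-access-key>'
+CSV;
+```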
 
-:::info
-Even if your Airbyte instance is running on a server in the same VPC as your Redshift cluster, you may need to place them in the **same security group** to allow connections between the two.
+## Step 1: Set up Redshift
 
-:::
+1. [Log in](https://aws.amazon.com/console/) to the AWS Management Console.
+   If you don't have an AWS account already, you’ll need to [create](https://aws.amazon.com/premiumsupport/knowledge-center/create-and-activate-aws-account/) one in order to use Redshift.
+2. Go to the AWS Redshift service.
+3. [Create](https://docs.aws.amazon.com/ses/latest/dg/event-publishing-redshift-cluster.html) and activate an AWS Redshift cluster if you don't have one ready.
+4. (Optional) [Allow](https://aws.amazon.com/premiumsupport/knowledge-center/cannot-connect-redshift-cluster/) connections from Airbyte to your Redshift cluster \(if they exist in separate VPCs\).
+5. (Optional) [Create](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) a staging S3 bucket \(for the COPY strategy\).
 
-### Setup guide
+## Step 2: Set up the destination connector in Airbyte
 
-#### 1. Make sure your cluster is active and accessible from the machine running Airbyte
+**For Airbyte Cloud:**
 
-This is dependent on your networking setup. The easiest way to verify if Airbyte is able to connect to your Redshift cluster is via the check connection tool in the UI. You can check AWS Redshift documentation with a tutorial on how to properly configure your cluster's access [here](https://docs.aws.amazon.com/redshift/latest/gsg/rs-gsg-authorize-cluster-access.html)
+1. [Log into your Airbyte Cloud](https://cloud.airbyte.io/workspaces) account.
+2. In the left navigation bar, click **Destinations**. In the top-right corner, click **+ new destination**.
+3. On the destination setup page, select **Redshift** from the Destination type dropdown and enter a name for this connector.
+4. Fill in all the required fields to use the INSERT or COPY strategy.
+5. Click `Set up destination`.
 
-#### 2. Fill up connection info
+**For Airbyte OSS:**
 
-Next is to provide the necessary information on how to connect to your cluster such as the `host` whcih is part of the connection string or Endpoint accessible [here](https://docs.aws.amazon.com/redshift/latest/gsg/rs-gsg-connect-to-cluster.html#rs-gsg-how-to-get-connection-string) without the `port` and `database` name \(it typically includes the cluster-id, region and end with `.redshift.amazonaws.com`\).
+1. Go to the local Airbyte page.
+2. In the left navigation bar, click **Destinations**. In the top-right corner, click **+ new destination**.
+3. On the destination setup page, select **Redshift** from the Destination type dropdown and enter a name for this connector.
+4. Fill in all the required fields to use the INSERT or COPY strategy.
+5. Click `Set up destination`.
 
-You should have all the requirements needed to configure Redshift as a destination in the UI. You'll need the following information to configure the destination:
 
-* **Host**
-* **Port**
-* **Username**
-* **Password**
-* **Schema**
-* **Database**
-  * This database needs to exist within the cluster provided.
+## Supported sync modes
 
-#### 2a. Fill up S3 info \(for COPY strategy\)
+The Redshift destination connector supports the following [sync modes](https://docs.airbyte.com/cloud/core-concepts/#connection-sync-mode):
+- Full Refresh
+- Incremental - Append Sync
+- Incremental - Deduped History
 
-Provide the required S3 info. 
+## Performance considerations -* **S3 Bucket Name** - * See [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) to create an S3 bucket. -* **S3 Bucket Region** - * Place the S3 bucket and the Redshift cluster in the same region to save on networking costs. -* **Access Key Id** - * See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) on how to generate an access key. - * We recommend creating an Airbyte-specific user. This user will require [read and write permissions](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_examples_s3_rw-bucket.html) to objects in the staging bucket. -* **Secret Access Key** - * Corresponding key to the above key id. -* **Part Size** - * Affects the size limit of an individual Redshift table. Optional. Increase this if syncing tables larger than 100GB. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default table limit of 100GB. Note, a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care. +Synchronization performance depends on the amount of data to be transferred. +Cluster scaling issues can be resolved directly using the cluster settings in the AWS Redshift console -Optional parameters: -* **Bucket Path** - * The directory within the S3 bucket to place the staging data. For example, if you set this to `yourFavoriteSubdirectory`, we will place the staging data inside `s3://yourBucket/yourFavoriteSubdirectory`. If not provided, defaults to the root directory. -* **Purge Staging Data** - * Whether to delete the staging files from S3 after completing the sync. Specifically, the connector will create CSV files named `bucketPath/namespace/streamName/syncDate_epochMillis_randomUuid.csv` containing three columns (`ab_id`, `data`, `emitted_at`). Normally these files are deleted after the `COPY` command completes; if you want to keep them for other purposes, set `purge_staging_data` to `false`. +## Connector-specific features & highlights -## Notes about Redshift Naming Conventions +### Notes about Redshift Naming Conventions From [Redshift Names & Identifiers](https://docs.aws.amazon.com/redshift/latest/dg/r_names.html): -### Standard Identifiers +#### Standard Identifiers * Begin with an ASCII single-byte alphabetic character or underscore character, or a UTF-8 multibyte character two to four bytes long. * Subsequent characters can be ASCII single-byte alphanumeric characters, underscores, or dollar signs, or UTF-8 multibyte characters two to four bytes long. * Be between 1 and 127 bytes in length, not including quotation marks for delimited identifiers. * Contain no quotation marks and no spaces. -### Delimited Identifiers +#### Delimited Identifiers Delimited identifiers \(also known as quoted identifiers\) begin and end with double quotation marks \("\). If you use a delimited identifier, you must use the double quotation marks for every reference to that object. The identifier can contain any standard UTF-8 printable characters other than the double quotation mark itself. Therefore, you can create column or table names that include otherwise illegal characters, such as spaces or the percent symbol. ASCII letters in delimited identifiers are case-insensitive and are folded to lowercase. 
To use a double quotation mark in a string, you must precede it with another double quotation mark character.
 
 Therefore, Airbyte Redshift destination will create tables and schemas using the Unquoted identifiers when possible or fallback to Quoted Identifiers if the names are containing special characters.
 
-## Data Size Limitations
+### Data Size Limitations
 
 Redshift specifies a maximum limit of 65535 bytes to store the raw JSON record data. Thus, when a row is too big to fit, the Redshift destination fails to load such data and currently ignores that record.
 See [docs](https://docs.aws.amazon.com/redshift/latest/dg/r_Character_types.html)
 
-## Encryption
+### Encryption
 
 All Redshift connections are encrypted using SSL
 
+### Output schema
+
+Each stream will be output into its own raw table in Redshift. Each table will contain 3 columns:
+
+* `_airbyte_ab_id`: a uuid assigned by Airbyte to each event that is processed. The column type in Redshift is `VARCHAR`.
+* `_airbyte_emitted_at`: a timestamp representing when the event was pulled from the data source. The column type in Redshift is `TIMESTAMP WITH TIME ZONE`.
+* `_airbyte_data`: a json blob representing the event data. The column type in Redshift is `VARCHAR` but can be parsed with JSON functions.
+
+## Data type mapping
+
+| Redshift Type | Airbyte Type | Notes |
+| :--- | :--- | :--- |
+| `boolean` | `boolean` | |
+| `int` | `integer` | |
+| `float` | `number` | |
+| `varchar` | `string` | |
+| `date/varchar` | `date` | |
+| `time/varchar` | `time` | |
+| `timestamptz/varchar` | `timestamp_with_timezone` | |
+| `varchar` | `array` | |
+| `varchar` | `object` | |
+
 ## Changelog
 
 | Version | Date | Pull Request | Subject |
@@ -142,3 +157,4 @@ All Redshift connections are encrypted using SSL
 | 0.3.12 | 2021-07-21 | [3555](https://github.com/airbytehq/airbyte/pull/3555) | Enable partial checkpointing for halfway syncs |
 | 0.3.11 | 2021-07-20 | [4874](https://github.com/airbytehq/airbyte/pull/4874) | allow `additionalProperties` in connector spec |
+

From 9f577bb027199320066d7f69da0845f873ea974c Mon Sep 17 00:00:00 2001
From: noahkawasaki-airbyte <103465980+noahkawasaki-airbyte@users.noreply.github.com>
Date: Wed, 27 Apr 2022 07:07:54 -0700
Subject: [PATCH 002/152] Update specs and definitions files for
 destination-postgres 0.3.19 (#12317)

* Generate specs and definitions files after destination-postgres 0.3.19

* Bump destination-postgres-strict-encrypt to 0.1.5
---
 .../src/main/resources/seed/destination_definitions.yaml | 2 +-
 .../init/src/main/resources/seed/destination_specs.yaml  | 2 +-
 .../destination-postgres-strict-encrypt/Dockerfile       | 2 +-
 .../src/test/resources/expected_spec.json                | 6 ++++++
 4 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
index bcf75228f11348..e9f195a598225e 100644
--- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
@@ -167,7 +167,7 @@
 - name: Postgres
   destinationDefinitionId: 25c5221d-dce2-4163-ade9-739ef790f503
   dockerRepository: airbyte/destination-postgres
-  dockerImageTag: 0.3.18
+  dockerImageTag: 0.3.19
   documentationUrl: https://docs.airbyte.io/integrations/destinations/postgres
   icon: postgresql.svg
 - name: Pulsar
diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml 
b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index af9b9c4fb3d5ff..dfa8a93846274e 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -3047,7 +3047,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-postgres:0.3.18" +- dockerImage: "airbyte/destination-postgres:0.3.19" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/postgres" connectionSpecification: diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile index 1c32dea0e209b0..0c472d5343f89d 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-postgres-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.4 +LABEL io.airbyte.version=0.1.5 LABEL io.airbyte.name=airbyte/destination-postgres-strict-encrypt diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test/resources/expected_spec.json b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test/resources/expected_spec.json index 10e1c1251a4402..8ba1678fcb554a 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test/resources/expected_spec.json +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test/resources/expected_spec.json @@ -54,6 +54,12 @@ "airbyte_secret": true, "order": 5 }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3).", + "title": "JDBC URL Params", + "type": "string", + "order": 7 + }, "tunnel_method": { "type": "object", "title": "SSH Tunnel Method", From b70a6fbd87aff1c960e35f25109942ca05daa4bd Mon Sep 17 00:00:00 2001 From: LiRen Tu Date: Wed, 27 Apr 2022 07:28:58 -0700 Subject: [PATCH 003/152] Format java code (#12401) --- .../databricks/DatabricksConstants.java | 3 +- .../destination/jdbc/SqlOperations.java | 47 ++++++++++--------- .../jdbc/copy/CopyConsumerFactory.java | 42 ++++++++--------- .../destination/jdbc/copy/StreamCopier.java | 1 + .../staging/StagingOperations.java | 2 +- .../LocalJsonDestinationAcceptanceTest.java | 1 - .../MariaDbTestDataComparator.java | 24 ++++++---- ...bColumnstoreDestinationAcceptanceTest.java | 5 +- .../MeiliSearchDestinationAcceptanceTest.java | 5 +- .../MongodbDestinationAcceptanceTest.java | 5 +- .../mqtt/MqttDestinationAcceptanceTest.java | 5 +- .../destination/oracle/OracleOperations.java | 22 ++++----- .../oracle/OracleTestDataComparator.java | 30 +++++++----- .../SshOracleDestinationAcceptanceTest.java | 4 +- ...ryptedOracleDestinationAcceptanceTest.java | 5 +- 15 files changed, 100 insertions(+), 101 deletions(-) diff --git a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksConstants.java b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksConstants.java index f3d014d63726bc..4a5c1a4b146af0 100644 --- a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksConstants.java +++ b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksConstants.java @@ -15,7 +15,6 @@ public class DatabricksConstants { "delta.autoOptimize.optimizeWrite = true", "delta.autoOptimize.autoCompact = true"); - private DatabricksConstants() { - } + private DatabricksConstants() {} } diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/SqlOperations.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/SqlOperations.java index 3d4eea93012b52..37212fcff9e1d1 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/SqlOperations.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/SqlOperations.java @@ -21,7 +21,7 @@ public interface SqlOperations { /** * Create a schema with provided name if it does not already exist. * - * @param database Database that the connector is syncing + * @param database Database that the connector is syncing * @param schemaName Name of schema. * @throws Exception exception */ @@ -30,7 +30,7 @@ public interface SqlOperations { /** * Denotes whether the schema exists in destination database * - * @param database Database that the connector is syncing + * @param database Database that the connector is syncing * @param schemaName Name of schema. * @return true if the schema exists in destination database, false if it doesn't */ @@ -41,9 +41,9 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN /** * Create a table with provided name in provided schema if it does not already exist. 
* - * @param database Database that the connector is syncing + * @param database Database that the connector is syncing * @param schemaName Name of schema - * @param tableName Name of table + * @param tableName Name of table * @throws Exception exception */ void createTableIfNotExists(JdbcDatabase database, String schemaName, String tableName) throws Exception; @@ -51,9 +51,9 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN /** * Query to create a table with provided name in provided schema if it does not already exist. * - * @param database Database that the connector is syncing + * @param database Database that the connector is syncing * @param schemaName Name of schema - * @param tableName Name of table + * @param tableName Name of table * @return query */ String createTableQuery(JdbcDatabase database, String schemaName, String tableName); @@ -62,7 +62,7 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN * Drop the table if it exists. * * @param schemaName Name of schema - * @param tableName Name of table + * @param tableName Name of table * @throws Exception exception */ void dropTableIfExists(JdbcDatabase database, String schemaName, String tableName) throws Exception; @@ -70,9 +70,9 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN /** * Query to remove all records from a table. Assumes the table exists. * - * @param database Database that the connector is syncing + * @param database Database that the connector is syncing * @param schemaName Name of schema - * @param tableName Name of table + * @param tableName Name of table * @return Query */ String truncateTableQuery(JdbcDatabase database, String schemaName, String tableName); @@ -80,20 +80,21 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN /** * Insert records into table. Assumes the table exists. * - * @param database Database that the connector is syncing - * @param records Records to insert. + * @param database Database that the connector is syncing + * @param records Records to insert. * @param schemaName Name of schema - * @param tableName Name of table + * @param tableName Name of table * @throws Exception exception */ void insertRecords(JdbcDatabase database, List records, String schemaName, String tableName) throws Exception; /** - * Query to copy all records from source table to destination table. Both tables must be in the specified schema. Assumes both table exist. + * Query to copy all records from source table to destination table. Both tables must be in the + * specified schema. Assumes both table exist. * - * @param database Database that the connector is syncing - * @param schemaName Name of schema - * @param sourceTableName Name of source table + * @param database Database that the connector is syncing + * @param schemaName Name of schema + * @param sourceTableName Name of source table * @param destinationTableName Name of destination table * @return Query */ @@ -103,7 +104,7 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN * Given an arbitrary number of queries, execute a transaction. 
* * @param database Database that the connector is syncing - * @param queries Queries to execute + * @param queries Queries to execute * @throws Exception exception */ void executeTransaction(JdbcDatabase database, List queries) throws Exception; @@ -120,19 +121,21 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN */ boolean isSchemaRequired(); - /** - * The method is responsible for executing some specific DB Engine logic in onClose method. We can override this method to execute specific logic - * e.g. to handle any necessary migrations in the destination, etc. + * The method is responsible for executing some specific DB Engine logic in onClose method. We can + * override this method to execute specific logic e.g. to handle any necessary migrations in the + * destination, etc. *

- * In next example you can see how migration from VARCHAR to SUPER column is handled for the Redshift destination: + * In next example you can see how migration from VARCHAR to SUPER column is handled for the + * Redshift destination: * * @param database - Database that the connector is interacting with - * @param schemaNames - schemas will be discovered + * @param schemaNames - schemas will be discovered * @see io.airbyte.integrations.destination.redshift.RedshiftSqlOperations#onDestinationCloseOperations */ default void onDestinationCloseOperations(JdbcDatabase database, Set schemaNames) { // do nothing LOGGER.info("No onDestinationCloseOperations required for this destination."); } + } diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java index 9970402d67870f..2fb4d0b3bf3df1 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java @@ -35,13 +35,13 @@ public class CopyConsumerFactory { private static final Logger LOGGER = LoggerFactory.getLogger(CopyConsumerFactory.class); public static AirbyteMessageConsumer create(final Consumer outputRecordCollector, - final JdbcDatabase database, - final SqlOperations sqlOperations, - final ExtendedNameTransformer namingResolver, - final T config, - final ConfiguredAirbyteCatalog catalog, - final StreamCopierFactory streamCopierFactory, - final String defaultSchema) { + final JdbcDatabase database, + final SqlOperations sqlOperations, + final ExtendedNameTransformer namingResolver, + final T config, + final ConfiguredAirbyteCatalog catalog, + final StreamCopierFactory streamCopierFactory, + final String defaultSchema) { final Map pairToCopier = createWriteConfigs( namingResolver, config, @@ -65,12 +65,12 @@ public static AirbyteMessageConsumer create(final Consumer o } private static Map createWriteConfigs(final ExtendedNameTransformer namingResolver, - final T config, - final ConfiguredAirbyteCatalog catalog, - final StreamCopierFactory streamCopierFactory, - final String defaultSchema, - final JdbcDatabase database, - final SqlOperations sqlOperations) { + final T config, + final ConfiguredAirbyteCatalog catalog, + final StreamCopierFactory streamCopierFactory, + final String defaultSchema, + final JdbcDatabase database, + final SqlOperations sqlOperations) { final Map pairToCopier = new HashMap<>(); final String stagingFolder = UUID.randomUUID().toString(); for (final var configuredStream : catalog.getStreams()) { @@ -89,8 +89,8 @@ private static OnStartFunction onStartFunction(final Map recordWriterFunction(final Map pairToCopier, - final SqlOperations sqlOperations, - final Map pairToIgnoredRecordCount) { + final SqlOperations sqlOperations, + final Map pairToIgnoredRecordCount) { return (AirbyteStreamNameNamespacePair pair, List records) -> { final var fileName = pairToCopier.get(pair).prepareStagingFile(); for (final AirbyteRecordMessage recordMessage : records) { @@ -117,9 +117,9 @@ private static CheckAndRemoveRecordWriter removeStagingFilePrinter(final Map pairToCopier, - final JdbcDatabase database, - final SqlOperations sqlOperations, - final Map pairToIgnoredRecordCount) { + final 
JdbcDatabase database, + final SqlOperations sqlOperations, + final Map pairToIgnoredRecordCount) { return (hasFailed) -> { pairToIgnoredRecordCount .forEach((pair, count) -> LOGGER.warn("A total of {} record(s) of data from stream {} were invalid and were ignored.", count, pair)); @@ -128,9 +128,9 @@ private static OnCloseFunction onCloseFunction(final Map pairToCopier, - boolean hasFailed, - final JdbcDatabase db, - final SqlOperations sqlOperations) + boolean hasFailed, + final JdbcDatabase db, + final SqlOperations sqlOperations) throws Exception { Exception firstException = null; List streamCopiers = new ArrayList<>(pairToCopier.values()); diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/StreamCopier.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/StreamCopier.java index 93eb78cadafc28..d655bea2f147e9 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/StreamCopier.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/StreamCopier.java @@ -77,4 +77,5 @@ public interface StreamCopier { * @return current staging file name */ String getCurrentFile(); + } diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingOperations.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingOperations.java index 5af382004d7536..e2a1b799e48ca9 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingOperations.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingOperations.java @@ -11,7 +11,7 @@ import java.util.UUID; import org.joda.time.DateTime; -public interface StagingOperations extends SqlOperations { +public interface StagingOperations extends SqlOperations { String getStageName(String namespace, String streamName); diff --git a/airbyte-integrations/connectors/destination-local-json/src/test-integration/java/io/airbyte/integrations/destination/local_json/LocalJsonDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-local-json/src/test-integration/java/io/airbyte/integrations/destination/local_json/LocalJsonDestinationAcceptanceTest.java index aa17f0a82513cc..63e7dd55d6c686 100644 --- a/airbyte-integrations/connectors/destination-local-json/src/test-integration/java/io/airbyte/integrations/destination/local_json/LocalJsonDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-local-json/src/test-integration/java/io/airbyte/integrations/destination/local_json/LocalJsonDestinationAcceptanceTest.java @@ -14,7 +14,6 @@ import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; - import java.nio.file.Files; import java.nio.file.Path; import java.util.List; diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariaDbTestDataComparator.java 
b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariaDbTestDataComparator.java index 45b6f093f5f94b..d10d4349dc23e7 100644 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariaDbTestDataComparator.java +++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariaDbTestDataComparator.java @@ -1,22 +1,26 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + package io.airbyte.integrations.destination.mariadb_columnstore; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; - import java.util.ArrayList; import java.util.List; public class MariaDbTestDataComparator extends AdvancedTestDataComparator { - private final ExtendedNameTransformer namingResolver = new MariadbColumnstoreNameTransformer(); + private final ExtendedNameTransformer namingResolver = new MariadbColumnstoreNameTransformer(); + + @Override + protected List resolveIdentifier(final String identifier) { + final List result = new ArrayList<>(); + final String resolved = namingResolver.getIdentifier(identifier); + result.add(identifier); + result.add(resolved); - @Override - protected List resolveIdentifier(final String identifier) { - final List result = new ArrayList<>(); - final String resolved = namingResolver.getIdentifier(identifier); - result.add(identifier); - result.add(resolved); + return result; + } - return result; - } } diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java index 442e684de020cb..8098ab53ae4564 100644 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java @@ -13,13 +13,10 @@ import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.sql.SQLException; -import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; - -import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testcontainers.containers.MariaDBContainer; diff --git a/airbyte-integrations/connectors/destination-meilisearch/src/test-integration/java/io/airbyte/integrations/destination/meilisearch/MeiliSearchDestinationAcceptanceTest.java 
b/airbyte-integrations/connectors/destination-meilisearch/src/test-integration/java/io/airbyte/integrations/destination/meilisearch/MeiliSearchDestinationAcceptanceTest.java index 66cd9a83e29bf5..bd94430bebdb1d 100644 --- a/airbyte-integrations/connectors/destination-meilisearch/src/test-integration/java/io/airbyte/integrations/destination/meilisearch/MeiliSearchDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-meilisearch/src/test-integration/java/io/airbyte/integrations/destination/meilisearch/MeiliSearchDestinationAcceptanceTest.java @@ -14,15 +14,14 @@ import io.airbyte.commons.stream.MoreStreams; import io.airbyte.commons.text.Names; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; - -import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import org.testcontainers.containers.GenericContainer; import org.testcontainers.utility.DockerImageName; diff --git a/airbyte-integrations/connectors/destination-mongodb/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mongodb/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationAcceptanceTest.java index ac93c4e54a5df5..a04fdd273ec6fc 100644 --- a/airbyte-integrations/connectors/destination-mongodb/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mongodb/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationAcceptanceTest.java @@ -12,11 +12,10 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.db.mongodb.MongoDatabase; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; -import java.util.ArrayList; -import java.util.List; - import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; +import java.util.ArrayList; +import java.util.List; import org.bson.Document; import org.testcontainers.containers.MongoDBContainer; diff --git a/airbyte-integrations/connectors/destination-mqtt/src/test-integration/java/io/airbyte/integrations/destination/mqtt/MqttDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mqtt/src/test-integration/java/io/airbyte/integrations/destination/mqtt/MqttDestinationAcceptanceTest.java index 8e216112d4622b..d4f9b381187c86 100644 --- a/airbyte-integrations/connectors/destination-mqtt/src/test-integration/java/io/airbyte/integrations/destination/mqtt/MqttDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mqtt/src/test-integration/java/io/airbyte/integrations/destination/mqtt/MqttDestinationAcceptanceTest.java @@ -13,6 +13,8 @@ import com.hivemq.testcontainer.junit5.HiveMQTestContainerExtension; import io.airbyte.commons.json.Jsons; import 
io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.net.InetAddress; import java.net.NetworkInterface; import java.net.SocketException; @@ -23,9 +25,6 @@ import java.util.List; import java.util.Map; import java.util.UUID; - -import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import org.eclipse.paho.client.mqttv3.MqttClient; import org.eclipse.paho.client.mqttv3.MqttConnectOptions; import org.eclipse.paho.client.mqttv3.MqttException; diff --git a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleOperations.java b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleOperations.java index b32cf07cbb4568..a133dfb5285f34 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleOperations.java +++ b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleOperations.java @@ -94,9 +94,9 @@ public String truncateTableQuery(final JdbcDatabase database, final String schem @Override public void insertRecords(final JdbcDatabase database, - final List records, - final String schemaName, - final String tempTableName) + final List records, + final String schemaName, + final String tempTableName) throws Exception { final String tableName = String.format("%s.%s", schemaName, tempTableName); final String columns = String.format("(%s, %s, %s)", @@ -107,11 +107,11 @@ public void insertRecords(final JdbcDatabase database, // Adapted from SqlUtils.insertRawRecordsInSingleQuery to meet some needs specific to Oracle syntax private static void insertRawRecordsInSingleQuery(final String tableName, - final String columns, - final String recordQueryComponent, - final JdbcDatabase jdbcDatabase, - final List records, - final Supplier uuidSupplier) + final String columns, + final String recordQueryComponent, + final JdbcDatabase jdbcDatabase, + final List records, + final Supplier uuidSupplier) throws SQLException { if (records.isEmpty()) { return; @@ -152,9 +152,9 @@ private static void insertRawRecordsInSingleQuery(final String tableName, @Override public String copyTableQuery(final JdbcDatabase database, - final String schemaName, - final String sourceTableName, - final String destinationTableName) { + final String schemaName, + final String sourceTableName, + final String destinationTableName) { return String.format("INSERT INTO %s.%s SELECT * FROM %s.%s\n", schemaName, destinationTableName, schemaName, sourceTableName); } diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/OracleTestDataComparator.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/OracleTestDataComparator.java index 5be791e75410a4..0ddb650fe2c6db 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/OracleTestDataComparator.java +++ 
b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/OracleTestDataComparator.java @@ -1,25 +1,29 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + package io.airbyte.integrations.destination.oracle; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; - import java.util.ArrayList; import java.util.List; public class OracleTestDataComparator extends AdvancedTestDataComparator { - private final ExtendedNameTransformer namingResolver = new OracleNameTransformer(); + private final ExtendedNameTransformer namingResolver = new OracleNameTransformer(); - @Override - protected List resolveIdentifier(final String identifier) { - final List result = new ArrayList<>(); - final String resolved = namingResolver.getIdentifier(identifier); - result.add(identifier); - result.add(resolved); - if (!resolved.startsWith("\"")) { - result.add(resolved.toLowerCase()); - result.add(resolved.toUpperCase()); - } - return result; + @Override + protected List resolveIdentifier(final String identifier) { + final List result = new ArrayList<>(); + final String resolved = namingResolver.getIdentifier(identifier); + result.add(identifier); + result.add(resolved); + if (!resolved.startsWith("\"")) { + result.add(resolved.toLowerCase()); + result.add(resolved.toUpperCase()); } + return result; + } + } diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java index ee48ebcbcc7481..133a44263c2511 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java @@ -17,13 +17,11 @@ import io.airbyte.integrations.base.ssh.SshTunnel; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.stream.Collectors; - -import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import org.testcontainers.containers.Network; public abstract class SshOracleDestinationAcceptanceTest extends DestinationAcceptanceTest { diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java index fd404bee795564..1342c57dafd375 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java +++ 
b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java @@ -17,13 +17,10 @@ import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.sql.SQLException; -import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; - -import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import org.junit.Test; public class UnencryptedOracleDestinationAcceptanceTest extends DestinationAcceptanceTest { From eea6d1a95ed5d83715b96140a982c3439948a1a9 Mon Sep 17 00:00:00 2001 From: Serhii Lazebnyi <53845333+lazebnyi@users.noreply.github.com> Date: Wed, 27 Apr 2022 17:42:01 +0300 Subject: [PATCH 004/152] Source Instagram: Deleted read_insights scope from OAuth (#12344) * Deleted read_insights scope from oauth * Deleted read_insights scope from test --- .../io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java | 2 +- .../io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java index c330b42980a6f7..48f72b58144a11 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java @@ -12,7 +12,7 @@ // Instagram Graph API require Facebook API User token public class InstagramOAuthFlow extends FacebookMarketingOAuthFlow { - private static final String SCOPES = "ads_management,instagram_basic,instagram_manage_insights,read_insights"; + private static final String SCOPES = "ads_management,instagram_basic,instagram_manage_insights"; public InstagramOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { super(configRepository, httpClient); diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java index f4ed295a230056..31cb39bc935f98 100644 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java +++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java @@ -20,7 +20,7 @@ protected BaseOAuthFlow getOAuthFlow() { @Override protected String getExpectedConsentUrl() { - return "https://www.facebook.com/v12.0/dialog/oauth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scope=ads_management%2Cinstagram_basic%2Cinstagram_manage_insights%2Cread_insights"; + return "https://www.facebook.com/v12.0/dialog/oauth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scope=ads_management%2Cinstagram_basic%2Cinstagram_manage_insights"; } @Override From 3ece0c4774fdbf57ed472dc0e8c5ea945b5e60f8 Mon Sep 17 00:00:00 2001 From: Edmundo Ruiz Ghanem <168664+edmundito@users.noreply.github.com> Date: Wed, 27 Apr 2022 10:43:46 -0400 Subject: [PATCH 005/152] Replace DeleteModal with Confirmation Modal (#12275) When delete is 
confirmed, navigate away from route --- .../ConfirmationModal/ConfirmationModal.tsx | 4 +- .../components/DeleteBlock/DeleteBlock.tsx | 25 +++++++-- .../DeleteBlock/components/DeleteModal.tsx | 52 ------------------- .../ConfirmationModalService.tsx | 1 + 4 files changed, 24 insertions(+), 58 deletions(-) delete mode 100644 airbyte-webapp/src/components/DeleteBlock/components/DeleteModal.tsx diff --git a/airbyte-webapp/src/components/ConfirmationModal/ConfirmationModal.tsx b/airbyte-webapp/src/components/ConfirmationModal/ConfirmationModal.tsx index 2747f956b9aaf7..5bc37008896083 100644 --- a/airbyte-webapp/src/components/ConfirmationModal/ConfirmationModal.tsx +++ b/airbyte-webapp/src/components/ConfirmationModal/ConfirmationModal.tsx @@ -29,6 +29,7 @@ export interface ConfirmationModalProps { text: string; submitButtonText: string; onSubmit: () => void; + submitButtonDataId?: string; } export const ConfirmationModal: React.FC = ({ @@ -37,6 +38,7 @@ export const ConfirmationModal: React.FC = ({ text, onSubmit, submitButtonText, + submitButtonDataId, }) => ( }> @@ -45,7 +47,7 @@ export const ConfirmationModal: React.FC = ({ - diff --git a/airbyte-webapp/src/components/DeleteBlock/DeleteBlock.tsx b/airbyte-webapp/src/components/DeleteBlock/DeleteBlock.tsx index 4769a2fe519ff3..680cac1f929ec8 100644 --- a/airbyte-webapp/src/components/DeleteBlock/DeleteBlock.tsx +++ b/airbyte-webapp/src/components/DeleteBlock/DeleteBlock.tsx @@ -1,11 +1,12 @@ -import React, { useState } from "react"; +import React, { useCallback } from "react"; import { FormattedMessage } from "react-intl"; import styled from "styled-components"; import { Button, H5 } from "components"; import ContentCard from "components/ContentCard"; -import DeleteModal from "./components/DeleteModal"; +import { useConfirmationModalService } from "hooks/services/ConfirmationModal"; +import useRouter from "hooks/useRouter"; type IProps = { type: "source" | "destination" | "connection"; @@ -29,7 +30,22 @@ const Text = styled.div` `; const DeleteBlock: React.FC = ({ type, onDelete }) => { - const [isModalOpen, setIsModalOpen] = useState(false); + const { openConfirmationModal, closeConfirmationModal } = useConfirmationModalService(); + const { push } = useRouter(); + + const onDeleteButtonClick = useCallback(() => { + openConfirmationModal({ + text: `tables.${type}DeleteModalText`, + title: `tables.${type}DeleteConfirm`, + submitButtonText: "form.delete", + onSubmit: async () => { + await onDelete(); + closeConfirmationModal(); + push("../.."); + }, + submitButtonDataId: "delete", + }); + }, [closeConfirmationModal, onDelete, openConfirmationModal, push, type]); return ( <> @@ -40,11 +56,10 @@ const DeleteBlock: React.FC = ({ type, onDelete }) => { - - {isModalOpen && setIsModalOpen(false)} onSubmit={onDelete} />} ); }; diff --git a/airbyte-webapp/src/components/DeleteBlock/components/DeleteModal.tsx b/airbyte-webapp/src/components/DeleteBlock/components/DeleteModal.tsx deleted file mode 100644 index a8a4f36859ba09..00000000000000 --- a/airbyte-webapp/src/components/DeleteBlock/components/DeleteModal.tsx +++ /dev/null @@ -1,52 +0,0 @@ -import React from "react"; -import { FormattedMessage } from "react-intl"; -import { useMutation } from "react-query"; -import styled from "styled-components"; - -import { Button, LoadingButton } from "components"; -import Modal from "components/Modal"; -export type IProps = { - onClose: () => void; - onSubmit: () => Promise; - type: "source" | "destination" | "connection"; -}; - -const Content = styled.div` - 
width: 585px; - font-size: 14px; - line-height: 28px; - padding: 10px 40px 15px 37px; - white-space: pre-line; -`; - -const ButtonContent = styled.div` - padding-top: 28px; - display: flex; - justify-content: flex-end; -`; - -const ButtonWithMargin = styled(Button)` - margin-right: 12px; -`; - -const DeleteModal: React.FC = ({ onClose, onSubmit, type }) => { - const { isLoading, mutateAsync } = useMutation(() => onSubmit()); - - return ( - }> - - - - - - - mutateAsync()} data-id="delete"> - - - - - - ); -}; - -export default DeleteModal; diff --git a/airbyte-webapp/src/hooks/services/ConfirmationModal/ConfirmationModalService.tsx b/airbyte-webapp/src/hooks/services/ConfirmationModal/ConfirmationModalService.tsx index 6b4b18ceba19f2..089effe26b5f7d 100644 --- a/airbyte-webapp/src/hooks/services/ConfirmationModal/ConfirmationModalService.tsx +++ b/airbyte-webapp/src/hooks/services/ConfirmationModal/ConfirmationModalService.tsx @@ -64,6 +64,7 @@ export const ConfirmationModalService = ({ children }: { children: React.ReactNo text={state.confirmationModal.text} onSubmit={state.confirmationModal.onSubmit} submitButtonText={state.confirmationModal.submitButtonText} + submitButtonDataId={state.confirmationModal.submitButtonDataId} /> ) : null} From 7e1d95c783406cc2fff3ce3f767862933a094529 Mon Sep 17 00:00:00 2001 From: George Claireaux Date: Wed, 27 Apr 2022 16:18:22 +0100 Subject: [PATCH 006/152] =?UTF-8?q?=F0=9F=A4=96=20publish=20command:=20fix?= =?UTF-8?q?=20for=20version=20bumping=20multiple=20connectors=20in=20same?= =?UTF-8?q?=20PR=20=20(#12397)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add a git pull from origin master before commit auto changes * dummy bump 2 connectors * pull from current branch not master * auto-bump connector version * dummy bump 2 connectors * revert faker bump * auto-bump connector version * better descript in changelog * dummy bump... 
again * commit first, then pull merge, then push * auto-bump connector version * auto-bump connector version Co-authored-by: Octavia Squidington III --- .github/workflows/publish-command.yml | 1 + .../init/src/main/resources/seed/source_definitions.yaml | 4 ++-- airbyte-config/init/src/main/resources/seed/source_specs.yaml | 4 ++-- .../connectors/source-apify-dataset/Dockerfile | 2 +- airbyte-integrations/connectors/source-openweather/Dockerfile | 2 +- docs/integrations/sources/apify-dataset.md | 1 + docs/integrations/sources/openweather.md | 1 + 7 files changed, 9 insertions(+), 6 deletions(-) diff --git a/.github/workflows/publish-command.yml b/.github/workflows/publish-command.yml index b7c0aaa175198a..e1590d173a2540 100644 --- a/.github/workflows/publish-command.yml +++ b/.github/workflows/publish-command.yml @@ -228,6 +228,7 @@ jobs: run: | git add -u git commit -m "auto-bump connector version" + git pull origin ${{ github.event.inputs.gitref }} git push origin ${{ github.event.inputs.gitref }} - name: Add Version Bump Success Comment if: github.event.inputs.comment-id && github.event.inputs.auto-bump-version == 'true' && success() diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index a021f9fce2f315..63dae61c6306c1 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -42,7 +42,7 @@ - name: Apify Dataset sourceDefinitionId: 47f17145-fe20-4ef5-a548-e29b048adf84 dockerRepository: airbyte/source-apify-dataset - dockerImageTag: 0.1.9 + dockerImageTag: 0.1.11 documentationUrl: https://docs.airbyte.io/integrations/sources/apify-dataset icon: apify.svg sourceType: api @@ -529,7 +529,7 @@ - name: OpenWeather sourceDefinitionId: d8540a80-6120-485d-b7d6-272bca477d9b dockerRepository: airbyte/source-openweather - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.4 documentationUrl: https://docs.airbyte.io/integrations/sources/openweather sourceType: api - name: Oracle DB diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index d69af5d6f59990..0bb0de7c559417 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -511,7 +511,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-apify-dataset:0.1.9" +- dockerImage: "airbyte/source-apify-dataset:0.1.11" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/apify-dataset" connectionSpecification: @@ -5637,7 +5637,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-openweather:0.1.1" +- dockerImage: "airbyte/source-openweather:0.1.4" spec: documentationUrl: "https://docsurl.com" connectionSpecification: diff --git a/airbyte-integrations/connectors/source-apify-dataset/Dockerfile b/airbyte-integrations/connectors/source-apify-dataset/Dockerfile index fbf31c680e2f8e..3c25c0ce7cbd5c 100644 --- a/airbyte-integrations/connectors/source-apify-dataset/Dockerfile +++ b/airbyte-integrations/connectors/source-apify-dataset/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . 
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.9 +LABEL io.airbyte.version=0.1.11 LABEL io.airbyte.name=airbyte/source-apify-dataset diff --git a/airbyte-integrations/connectors/source-openweather/Dockerfile b/airbyte-integrations/connectors/source-openweather/Dockerfile index 8b82589d62f2af..b344b066bd4754 100644 --- a/airbyte-integrations/connectors/source-openweather/Dockerfile +++ b/airbyte-integrations/connectors/source-openweather/Dockerfile @@ -34,5 +34,5 @@ COPY source_openweather ./source_openweather ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.version=0.1.4 LABEL io.airbyte.name=airbyte/source-openweather diff --git a/docs/integrations/sources/apify-dataset.md b/docs/integrations/sources/apify-dataset.md index dbd4473494adc8..1390e4e3633997 100644 --- a/docs/integrations/sources/apify-dataset.md +++ b/docs/integrations/sources/apify-dataset.md @@ -43,6 +43,7 @@ The Apify dataset connector uses [Apify Python Client](https://docs.apify.com/ap | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.11 | 2022-04-27 | [12397](https://github.com/airbytehq/airbyte/pull/12397) | No changes. Used connector to test publish workflow changes. | | 0.1.9 | 2022-04-05 | [PR\#11712](https://github.com/airbytehq/airbyte/pull/11712) | No changes from 0.1.4. Used connector to test publish workflow changes. | | 0.1.4 | 2021-12-23 | [PR\#8434](https://github.com/airbytehq/airbyte/pull/8434) | Update fields in source-connectors specifications | | 0.1.2 | 2021-11-08 | [PR\#7499](https://github.com/airbytehq/airbyte/pull/7499) | Remove base-python dependencies | diff --git a/docs/integrations/sources/openweather.md b/docs/integrations/sources/openweather.md index dba899a2e39d21..8609d916597b3b 100644 --- a/docs/integrations/sources/openweather.md +++ b/docs/integrations/sources/openweather.md @@ -34,5 +34,6 @@ The free plan allows 60 calls per minute and 1,000,000 calls per month, you won' | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.4 | 2022-04-27 | [12397](https://github.com/airbytehq/airbyte/pull/12397) | No changes. Used connector to test publish workflow changes. | | 0.1.0 | 2021-10-27 | [7434](https://github.com/airbytehq/airbyte/pull/7434) | Initial release | From 1413aca478badc861a1ac580d79ff24503f432d3 Mon Sep 17 00:00:00 2001 From: Topher Lubaway Date: Wed, 27 Apr 2022 10:26:35 -0500 Subject: [PATCH 007/152] Toph deploy docs ssh repo check (#12323) * Check for ssh github repo because assumptions mostly. 
clearer fail messages for an unsupported workflow; also removes the pipeline, which is now redundant with the local workflow * WIP1 * working test * More clear git test * less weird characters for grep * remove verbose for echo --- .github/workflows/documentation.yml | 37 ----------------------------- tools/bin/deploy_docusaurus | 11 +++++++++ 2 files changed, 11 insertions(+), 37 deletions(-) delete mode 100644 .github/workflows/documentation.yml diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml deleted file mode 100644 index 793172b2911a40..00000000000000 --- a/.github/workflows/documentation.yml +++ /dev/null @@ -1,37 +0,0 @@ -name: compile-docusaurus-static-assets - -on: - push: - branches: [master] - - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - deploy-docusaurus-to-docs-airbyte-io: - runs-on: ubuntu-latest - steps: - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - name: Check out repo - # `uses` taps GH ORG/REPO@version. - # "actions" is a default org for some common GH actions - uses: actions/checkout@v3 - with: - fetch-depth: 0 - # Node is required for yarn - - name: Set up Yarn - uses: actions/setup-node@v2 - with: - node-version: '16.13.0' - cache: 'yarn' - cache-dependency-path: docusaurus - # # Build Docusaurus website - # - name: Check for docusaurus changes not committed - # run: ./tools/bin/check_docusaurus_build_changes - # # Install and build Docusaurus website - # - name: Deploy docs to production (it's weird) - # run: ./tools/bin/deploy_docusaurus - # env: - # GITHUB_TOKEN: ${{ secrets.OCTAVIA_PAT }} - diff --git a/tools/bin/deploy_docusaurus b/tools/bin/deploy_docusaurus index 2f74f66cac9ce8..fb4c206b15d81a 100755 --- a/tools/bin/deploy_docusaurus +++ b/tools/bin/deploy_docusaurus @@ -17,6 +17,17 @@ else exit 1 fi +# fail fast when the origin remote uses HTTP(S) instead of SSH +if $(git remote get-url origin | grep --quiet "http"); then + set +o xtrace + echo -e "$red_text""This program requires an SSH-based GitHub repo""$default_text" + echo -e "$red_text""https://docs.github.com/en/authentication/connecting-to-github-with-ssh/adding-a-new-ssh-key-to-your-github-account""$default_text" + echo -e "$red_text""You will need to change your remote to continue.
Yell @topher for help""$default_text" + echo -e "$red_text""change your remote command:""$default_text" + echo -e "$red_text""git remote set-url origin git@github.com:airbytehq/airbyte.git""$default_text" + exit 1 +fi + # ------------- Start Main set +o xtrace From 45a212c4c5824cdbb9f4e673895cc56709009f26 Mon Sep 17 00:00:00 2001 From: Yurii Bidiuk <35812734+yurii-bidiuk@users.noreply.github.com> Date: Wed, 27 Apr 2022 18:52:33 +0300 Subject: [PATCH 008/152] =?UTF-8?q?=F0=9F=8E=89=20=20Source=20Snowflake:?= =?UTF-8?q?=20support=20oauth=20=20(#10953)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add oauth flow to source-snowflake * Add unit test for oauth flow * add docs to method * format code * change configs * fixed remarks * fixed comments * fixed PR remark * update with master * format code * fix PR remmakrs * add test for backward compatibility * bump version * small fix for test * auto-bump connector version Co-authored-by: Octavia Squidington III --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 149 +++++++++++-- .../source/AbstractSourceConnectorTest.java | 7 + .../source/jdbc/AbstractJdbcSource.java | 2 +- .../connectors/source-snowflake/Dockerfile | 2 +- .../connectors/source-snowflake/README.md | 24 ++- .../connectors/source-snowflake/build.gradle | 1 + .../SnowflakeDataSourceUtils.java | 197 ++++++++++++++++++ .../SnowflakeSource.java | 88 +++++--- .../src/main/resources/spec.json | 170 ++++++++++++--- .../SnowflakeJdbcSourceAcceptanceTest.java | 20 +- .../SnowflakeSourceAcceptanceTest.java | 46 ++-- .../SnowflakeSourceAuthAcceptanceTest.java | 94 +++++++++ .../sources/SnowflakeSourceDatatypeTest.java | 4 +- .../SnowflakeDataSourceUtilsTest.java | 55 +++++ .../oauth/OAuthImplementationFactory.java | 1 + .../oauth/flows/SourceSnowflakeOAuthFlow.java | 144 +++++++++++++ .../oauth/flows/SnowflakeOAuthFlowTest.java | 82 ++++++++ docs/integrations/sources/snowflake.md | 28 +++ 19 files changed, 1014 insertions(+), 102 deletions(-) create mode 100644 airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java create mode 100644 airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAuthAcceptanceTest.java create mode 100644 airbyte-integrations/connectors/source-snowflake/src/test/java/io/airbyte/integrations/source/snowflake/SnowflakeDataSourceUtilsTest.java create mode 100644 airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SourceSnowflakeOAuthFlow.java create mode 100644 airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnowflakeOAuthFlowTest.java diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 63dae61c6306c1..3ec994c8549164 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -751,7 +751,7 @@ - name: Snowflake sourceDefinitionId: e2d65910-8c8b-40a1-ae7d-ee2416b2bfa2 dockerRepository: airbyte/source-snowflake - dockerImageTag: 0.1.10 + dockerImageTag: 0.1.11 documentationUrl: https://docs.airbyte.io/integrations/sources/snowflake icon: snowflake.svg sourceType: database diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml 
b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 0bb0de7c559417..333e61f9bada49 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -7955,7 +7955,7 @@ - - "client_secret" oauthFlowOutputParameters: - - "refresh_token" -- dockerImage: "airbyte/source-snowflake:0.1.10" +- dockerImage: "airbyte/source-snowflake:0.1.11" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/snowflake" connectionSpecification: @@ -7968,10 +7968,77 @@ - "warehouse" - "database" - "schema" - - "username" - - "password" - additionalProperties: false + additionalProperties: true properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + order: 0 + required: + - "client_id" + - "client_secret" + - "auth_type" + properties: + auth_type: + type: "string" + const: "OAuth" + default: "OAuth" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Snowflake developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Snowflake developer application." + airbyte_secret: true + order: 2 + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + order: 3 + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token for making authenticated requests." + airbyte_secret: true + order: 4 + - title: "Username and Password" + type: "object" + required: + - "username" + - "password" + - "auth_type" + order: 1 + properties: + auth_type: + type: "string" + const: "username/password" + default: "username/password" + order: 0 + username: + description: "The username you created to allow Airbyte to access\ + \ the database." + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 1 + password: + description: "The password associated with the username." + type: "string" + airbyte_secret: true + title: "Password" + order: 2 + order: 0 host: description: "The host domain of the snowflake instance (must include the\ \ account, region, cloud environment, and end with snowflakecomputing.com)." @@ -7979,58 +8046,96 @@ - "accountname.us-east-2.aws.snowflakecomputing.com" type: "string" title: "Account Name" - order: 0 + order: 1 role: description: "The role you created for Airbyte to access Snowflake." examples: - "AIRBYTE_ROLE" type: "string" title: "Role" - order: 1 + order: 2 warehouse: description: "The warehouse you created for Airbyte to access data." examples: - "AIRBYTE_WAREHOUSE" type: "string" title: "Warehouse" - order: 2 + order: 3 database: description: "The database you created for Airbyte to access data." examples: - "AIRBYTE_DATABASE" type: "string" title: "Database" - order: 3 + order: 4 schema: description: "The source Snowflake schema tables." examples: - "AIRBYTE_SCHEMA" type: "string" title: "Schema" - order: 4 - username: - description: "The username you created to allow Airbyte to access the database." - examples: - - "AIRBYTE_USER" - type: "string" - title: "Username" order: 5 - password: - description: "The password associated with the username." 
- type: "string" - airbyte_secret: true - title: "Password" - order: 6 jdbc_url_params: description: "Additional properties to pass to the JDBC URL string when\ \ connecting to the database formatted as 'key=value' pairs separated\ \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." title: "JDBC URL Params" type: "string" - order: 7 + order: 6 supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] + advanced_auth: + auth_flow_type: "oauth2.0" + predicate_key: + - "credentials" + - "auth_type" + predicate_value: "OAuth" + oauth_config_specification: + oauth_user_input_from_connector_config_specification: + type: "object" + additionalProperties: false + properties: + host: + type: "string" + path_in_connector_config: + - "host" + complete_oauth_output_specification: + type: "object" + additionalProperties: false + properties: + access_token: + type: "string" + path_in_connector_config: + - "credentials" + - "access_token" + refresh_token: + type: "string" + path_in_connector_config: + - "credentials" + - "refresh_token" + complete_oauth_server_input_specification: + type: "object" + additionalProperties: false + properties: + client_id: + type: "string" + client_secret: + type: "string" + complete_oauth_server_output_specification: + type: "object" + additionalProperties: false + properties: + client_id: + type: "string" + path_in_connector_config: + - "credentials" + - "client_id" + client_secret: + type: "string" + path_in_connector_config: + - "credentials" + - "client_secret" - dockerImage: "airbyte/source-square:0.1.4" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/square" diff --git a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java index 02aeaacb3fde86..9bc01f2f7208af 100644 --- a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java +++ b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java @@ -140,6 +140,13 @@ protected StandardCheckConnectionOutput runCheck() throws Exception { .run(new StandardCheckConnectionInput().withConnectionConfiguration(getConfig()), jobRoot); } + protected String runCheckAndGetStatusAsString(JsonNode config) throws Exception { + return new DefaultCheckConnectionWorker( + workerConfigs, + new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, workerConfigs.getResourceRequirements())) + .run(new StandardCheckConnectionInput().withConnectionConfiguration(config), jobRoot).getStatus().toString(); + } + protected AirbyteCatalog runDiscover() throws Exception { return new DefaultDiscoverCatalogWorker( workerConfigs, diff --git a/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java b/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java index f85b4eebc0c55f..1d6c237a365967 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java +++ b/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java @@ -289,7 +289,7 @@ 
public JdbcDatabase createDatabase(final JsonNode config) throws SQLException { final JsonNode jdbcConfig = toDatabaseConfig(config); final JdbcDatabase database = Databases.createStreamingJdbcDatabase( - jdbcConfig.get("username").asText(), + jdbcConfig.has("username") ? jdbcConfig.get("username").asText() : null, jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null, jdbcConfig.get("jdbc_url").asText(), driverClass, diff --git a/airbyte-integrations/connectors/source-snowflake/Dockerfile b/airbyte-integrations/connectors/source-snowflake/Dockerfile index 2756403f596095..f2d1461977b569 100644 --- a/airbyte-integrations/connectors/source-snowflake/Dockerfile +++ b/airbyte-integrations/connectors/source-snowflake/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-snowflake COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.10 +LABEL io.airbyte.version=0.1.11 LABEL io.airbyte.name=airbyte/source-snowflake diff --git a/airbyte-integrations/connectors/source-snowflake/README.md b/airbyte-integrations/connectors/source-snowflake/README.md index 124f7a6c5ccfc6..759a0a6493885f 100644 --- a/airbyte-integrations/connectors/source-snowflake/README.md +++ b/airbyte-integrations/connectors/source-snowflake/README.md @@ -13,10 +13,28 @@ "warehouse": "AIRBYTE_WAREHOUSE", "database": "AIRBYTE_DATABASE", "schema": "AIRBYTE_SCHEMA", - "username": "AIRBYTE_USER", - "password": "SOMEPASSWORD" + "credentials": { + "auth_type": "username/password", + "username": "AIRBYTE_USER", + "password": "SOMEPASSWORD" + } +} +``` +3. Create a file at `secrets/config_auth.json` with the following format: +``` +{ + "host": "ACCOUNT.REGION.PROVIDER.snowflakecomputing.com", + "role": "AIRBYTE_ROLE", + "warehouse": "AIRBYTE_WAREHOUSE", + "database": "AIRBYTE_DATABASE", + "schema": "AIRBYTE_SCHEMA", + "credentials": { + "auth_type": "OAuth", + "client_id": "client_id", + "client_secret": "client_secret", + "refresh_token": "refresh_token" + } +} +``` - ## For Airbyte employees Put the contents of the `Snowflake Insert Test Creds` secret on Lastpass into `secrets/config.json` to be able to run integration tests locally. diff --git a/airbyte-integrations/connectors/source-snowflake/build.gradle b/airbyte-integrations/connectors/source-snowflake/build.gradle index 84f73f77e1722d..c641b62056d62b 100644 --- a/airbyte-integrations/connectors/source-snowflake/build.gradle +++ b/airbyte-integrations/connectors/source-snowflake/build.gradle @@ -17,6 +17,7 @@ dependencies { implementation project(':airbyte-protocol:models') implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) implementation group: 'net.snowflake', name: 'snowflake-jdbc', version: '3.13.9' + implementation 'com.zaxxer:HikariCP:5.0.1' testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation project(':airbyte-test-utils') diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java new file mode 100644 index 00000000000000..9d035194311637 --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java @@ -0,0 +1,197 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
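+ * + * Builds the JDBC data source for both supported auth modes; for OAuth it also schedules a periodic refresh of the short-lived access token (see createDataSource and getAccessTokenTask below).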
+ */ + +package io.airbyte.integrations.source.snowflake; + +import static java.util.stream.Collectors.joining; + +import com.fasterxml.jackson.databind.JsonNode; +import com.zaxxer.hikari.HikariConfig; +import com.zaxxer.hikari.HikariDataSource; +import io.airbyte.commons.json.Jsons; +import java.io.IOException; +import java.net.URI; +import java.net.URLEncoder; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpRequest.BodyPublisher; +import java.net.http.HttpRequest.BodyPublishers; +import java.net.http.HttpResponse; +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.util.Base64; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.TimeUnit; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class SnowflakeDataSourceUtils { + + public static final String OAUTH_METHOD = "OAuth"; + public static final String USERNAME_PASSWORD_METHOD = "username/password"; + public static final String UNRECOGNIZED = "Unrecognized"; + + private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeDataSourceUtils.class); + private static final int PAUSE_BETWEEN_TOKEN_REFRESH_MIN = 7; // snowflake access token's TTL is 10min and can't be modified + private static final String REFRESH_TOKEN_URL = "https://%s/oauth/token-request"; + private static final HttpClient httpClient = HttpClient.newBuilder() + .version(HttpClient.Version.HTTP_2) + .connectTimeout(Duration.ofSeconds(10)) + .build(); + + /** + * Snowflake OAuth access tokens expire after 10 minutes. For syncs that run longer than that, the + * 'token' property must be updated after the connection pool has started; + * HikariDataSource supports updating data source properties at runtime, which covers this requirement. + * + * @param config source config JSON + * @return datasource + */ + public static HikariDataSource createDataSource(final JsonNode config) { + HikariDataSource dataSource = new HikariDataSource(); + dataSource.setJdbcUrl(buildJDBCUrl(config)); + + if (config.has("credentials")) { + JsonNode credentials = config.get("credentials"); + final String authType = credentials.has("auth_type") ? credentials.get("auth_type").asText() : UNRECOGNIZED; + switch (authType) { + case OAUTH_METHOD -> { + LOGGER.info("Authorization mode is OAuth"); + dataSource.setDataSourceProperties(buildAuthProperties(config)); + // scheduled task that keeps the short-lived access token up to date + SnowflakeSource.SCHEDULED_EXECUTOR_SERVICE.scheduleAtFixedRate( + getAccessTokenTask(dataSource), + PAUSE_BETWEEN_TOKEN_REFRESH_MIN, PAUSE_BETWEEN_TOKEN_REFRESH_MIN, TimeUnit.MINUTES); + } + case USERNAME_PASSWORD_METHOD -> { + LOGGER.info("Authorization mode is 'Username and password'"); + populateUsernamePasswordConfig(dataSource, config.get("credentials")); + } + default -> throw new IllegalArgumentException("Unrecognized auth type: " + authType); + } + } else { + LOGGER.info("Authorization mode is the deprecated 'Username and password'. Please update your source configuration"); + populateUsernamePasswordConfig(dataSource, config); + } + + return dataSource; + } + + /** + * Requests a new access token using the refresh token and client credentials.
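+ * The request is a standard OAuth2 refresh_token grant: an HTTP POST to https://<account host>/oauth/token-request with grant_type=refresh_token in the form-encoded body, authenticated via HTTP Basic with the client id and secret. A minimal usage sketch (clientId, clientSecret and refreshToken are placeholder variables): + * <pre> + * String accessToken = SnowflakeDataSourceUtils.getAccessTokenUsingRefreshToken( + * "accountname.us-east-2.aws.snowflakecomputing.com", clientId, clientSecret, refreshToken); + * </pre>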
+ * + * @return access token + */ + public static String getAccessTokenUsingRefreshToken(final String hostName, + final String clientId, + final String clientSecret, + final String refreshToken) + throws IOException { + final var refreshTokenUri = String.format(REFRESH_TOKEN_URL, hostName); + final Map requestBody = new HashMap<>(); + requestBody.put("grant_type", "refresh_token"); + requestBody.put("refresh_token", refreshToken); + + try { + final BodyPublisher bodyPublisher = BodyPublishers.ofString(requestBody.keySet().stream() + .map(key -> key + "=" + URLEncoder.encode(requestBody.get(key), StandardCharsets.UTF_8)) + .collect(joining("&"))); + + final byte[] authorization = Base64.getEncoder() + .encode((clientId + ":" + clientSecret).getBytes(StandardCharsets.UTF_8)); + + final HttpRequest request = HttpRequest.newBuilder() + .POST(bodyPublisher) + .uri(URI.create(refreshTokenUri)) + .header("Content-Type", "application/x-www-form-urlencoded") + .header("Accept", "application/json") + .header("Authorization", "Basic " + new String(authorization, StandardCharsets.UTF_8)) + .build(); + + final HttpResponse response = httpClient.send(request, + HttpResponse.BodyHandlers.ofString()); + + final JsonNode jsonResponse = Jsons.deserialize(response.body()); + if (jsonResponse.has("access_token")) { + return jsonResponse.get("access_token").asText(); + } else { + LOGGER.error("Failed to obtain accessToken using refresh token. " + jsonResponse); + throw new RuntimeException( + "Failed to obtain accessToken using refresh token."); + } + } catch (final InterruptedException e) { + throw new IOException("Failed to refreshToken", e); + } + } + + public static String buildJDBCUrl(JsonNode config) { + final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:snowflake://%s/?", + config.get("host").asText())); + + // Add required properties + jdbcUrl.append(String.format( + "role=%s&warehouse=%s&database=%s&schema=%s&JDBC_QUERY_RESULT_FORMAT=%s&CLIENT_SESSION_KEEP_ALIVE=%s", + config.get("role").asText(), + config.get("warehouse").asText(), + config.get("database").asText(), + config.get("schema").asText(), + // Needed for JDK17 - see + // https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow + "JSON", + true)); + + // https://docs.snowflake.com/en/user-guide/jdbc-configure.html#jdbc-driver-connection-string + if (config.has("jdbc_url_params")) { + jdbcUrl.append("&").append(config.get("jdbc_url_params").asText()); + } + return jdbcUrl.toString(); + } + + private static Runnable getAccessTokenTask(final HikariDataSource dataSource) { + return () -> { + LOGGER.info("Refresh token process started"); + var props = dataSource.getDataSourceProperties(); + try { + var token = getAccessTokenUsingRefreshToken(props.getProperty("host"), + props.getProperty("client_id"), props.getProperty("client_secret"), + props.getProperty("refresh_token")); + props.setProperty("token", token); + dataSource.setDataSourceProperties(props); + LOGGER.info("New access token has been obtained"); + } catch (IOException e) { + LOGGER.error("Failed to obtain a fresh accessToken:" + e); + } + }; + } + + public static Properties buildAuthProperties(JsonNode config) { + Properties properties = new Properties(); + try { + var credentials = config.get("credentials"); + properties.setProperty("client_id", credentials.get("client_id").asText()); + properties.setProperty("client_secret", credentials.get("client_secret").asText()); + 
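+ // host, client_id, client_secret and refresh_token are read back by the scheduled refresh task (getAccessTokenTask) whenever it mints a replacement access token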
properties.setProperty("refresh_token", credentials.get("refresh_token").asText()); + properties.setProperty("host", config.get("host").asText()); + properties.put("authenticator", "oauth"); + properties.put("account", config.get("host").asText()); + + String accessToken = getAccessTokenUsingRefreshToken( + config.get("host").asText(), credentials.get("client_id").asText(), + credentials.get("client_secret").asText(), credentials.get("refresh_token").asText()); + + properties.put("token", accessToken); + } catch (IOException e) { + LOGGER.error("Request access token was failed with error" + e.getMessage()); + } + return properties; + } + + private static void populateUsernamePasswordConfig(HikariConfig hikariConfig, JsonNode config) { + hikariConfig.setUsername(config.get("username").asText()); + hikariConfig.setPassword(config.get("password").asText()); + } + +} diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java index b404e4fc3b3b3c..33fe4f4346718e 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java +++ b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java @@ -4,14 +4,25 @@ package io.airbyte.integrations.source.snowflake; +import static io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils.OAUTH_METHOD; +import static io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils.UNRECOGNIZED; +import static io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils.USERNAME_PASSWORD_METHOD; + import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.db.jdbc.StreamingJdbcDatabase; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; +import java.io.IOException; import java.sql.JDBCType; +import java.sql.SQLException; import java.util.Set; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import javax.sql.DataSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -19,54 +30,79 @@ public class SnowflakeSource extends AbstractJdbcSource implements Sou private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeSource.class); public static final String DRIVER_CLASS = "net.snowflake.client.jdbc.SnowflakeDriver"; + public static final ScheduledExecutorService SCHEDULED_EXECUTOR_SERVICE = Executors.newScheduledThreadPool(1); public SnowflakeSource() { - super(DRIVER_CLASS, new SnowflakeJdbcStreamingQueryConfiguration(), new SnowflakeSourceOperations()); + super(DRIVER_CLASS, new SnowflakeJdbcStreamingQueryConfiguration(), + new SnowflakeSourceOperations()); } public static void main(final String[] args) throws Exception { final Source source = new SnowflakeSource(); LOGGER.info("starting source: {}", SnowflakeSource.class); new IntegrationRunner(source).run(args); + SCHEDULED_EXECUTOR_SERVICE.shutdownNow(); LOGGER.info("completed source: {}", SnowflakeSource.class); } + @Override + public JdbcDatabase createDatabase(JsonNode config) throws SQLException { + final DataSource dataSource = 
SnowflakeDataSourceUtils.createDataSource(config); + var database = new StreamingJdbcDatabase(dataSource, new SnowflakeSourceOperations(), + new SnowflakeJdbcStreamingQueryConfiguration()); + quoteString = database.getMetaData().getIdentifierQuoteString(); + return database; + } + @Override public JsonNode toDatabaseConfig(final JsonNode config) { + final String jdbcUrl = SnowflakeDataSourceUtils.buildJDBCUrl(config); - final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:snowflake://%s/?", - config.get("host").asText())); + if (config.has("credentials")) { + JsonNode credentials = config.get("credentials"); + final String authType = + credentials.has("auth_type") ? credentials.get("auth_type").asText() : UNRECOGNIZED; + return switch (authType) { + case OAUTH_METHOD -> buildOAuthConfig(config, jdbcUrl); + case USERNAME_PASSWORD_METHOD -> buildUsernamePasswordConfig(config.get("credentials"), + jdbcUrl); + default -> throw new IllegalArgumentException("Unrecognized auth type: " + authType); + }; + } else { + return buildUsernamePasswordConfig(config, jdbcUrl); + } + } - // Add required properties - jdbcUrl.append(String.format("role=%s&warehouse=%s&database=%s&schema=%s&JDBC_QUERY_RESULT_FORMAT=%s&CLIENT_SESSION_KEEP_ALIVE=%s", - config.get("role").asText(), - config.get("warehouse").asText(), - config.get("database").asText(), - config.get("schema").asText(), - // Needed for JDK17 - see - // https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow - "JSON", - true)); + @Override + public Set getExcludedInternalNameSpaces() { + return Set.of( + "INFORMATION_SCHEMA"); + } - // https://docs.snowflake.com/en/user-guide/jdbc-configure.html#jdbc-driver-connection-string - if (config.has("jdbc_url_params")) { - jdbcUrl.append("&").append(config.get("jdbc_url_params").asText()); + private JsonNode buildOAuthConfig(JsonNode config, String jdbcUrl) { + final String accessToken; + var credentials = config.get("credentials"); + try { + accessToken = SnowflakeDataSourceUtils.getAccessTokenUsingRefreshToken( + config.get("host").asText(), credentials.get("client_id").asText(), + credentials.get("client_secret").asText(), credentials.get("refresh_token").asText()); + } catch (IOException e) { + throw new RuntimeException(e); } + final ImmutableMap.Builder configBuilder = ImmutableMap.builder() + .put("connection_properties", + String.join(";", "authenticator=oauth", "token=" + accessToken)) + .put("jdbc_url", jdbcUrl); + return Jsons.jsonNode(configBuilder.build()); + } - LOGGER.info(jdbcUrl.toString()); - + private JsonNode buildUsernamePasswordConfig(JsonNode config, String jdbcUrl) { final ImmutableMap.Builder configBuilder = ImmutableMap.builder() .put("username", config.get("username").asText()) .put("password", config.get("password").asText()) - .put("jdbc_url", jdbcUrl.toString()); - + .put("jdbc_url", jdbcUrl); + LOGGER.info(jdbcUrl); return Jsons.jsonNode(configBuilder.build()); } - @Override - public Set getExcludedInternalNameSpaces() { - return Set.of( - "INFORMATION_SCHEMA"); - } - } diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json b/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json index 95b989811537cb..689926366c68eb 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json @@ -4,71 +4,183 @@ "$schema": 
"http://json-schema.org/draft-07/schema#", "title": "Snowflake Source Spec", "type": "object", - "required": [ - "host", - "role", - "warehouse", - "database", - "schema", - "username", - "password" - ], - "additionalProperties": false, + "required": ["host", "role", "warehouse", "database", "schema"], + "additionalProperties": true, "properties": { + "credentials": { + "title": "Authorization Method", + "type": "object", + "oneOf": [ + { + "type": "object", + "title": "OAuth2.0", + "order": 0, + "required": ["client_id", "client_secret", "auth_type"], + "properties": { + "auth_type": { + "type": "string", + "const": "OAuth", + "default": "OAuth", + "order": 0 + }, + "client_id": { + "type": "string", + "title": "Client ID", + "description": "The Client ID of your Snowflake developer application.", + "airbyte_secret": true, + "order": 1 + }, + "client_secret": { + "type": "string", + "title": "Client Secret", + "description": "The Client Secret of your Snowflake developer application.", + "airbyte_secret": true, + "order": 2 + }, + "access_token": { + "type": "string", + "title": "Access Token", + "description": "Access Token for making authenticated requests.", + "airbyte_secret": true, + "order": 3 + }, + "refresh_token": { + "type": "string", + "title": "Refresh Token", + "description": "Refresh Token for making authenticated requests.", + "airbyte_secret": true, + "order": 4 + } + } + }, + { + "title": "Username and Password", + "type": "object", + "required": ["username", "password", "auth_type"], + "order": 1, + "properties": { + "auth_type": { + "type": "string", + "const": "username/password", + "default": "username/password", + "order": 0 + }, + "username": { + "description": "The username you created to allow Airbyte to access the database.", + "examples": ["AIRBYTE_USER"], + "type": "string", + "title": "Username", + "order": 1 + }, + "password": { + "description": "The password associated with the username.", + "type": "string", + "airbyte_secret": true, + "title": "Password", + "order": 2 + } + } + } + ], + "order": 0 + }, "host": { "description": "The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com).", "examples": ["accountname.us-east-2.aws.snowflakecomputing.com"], "type": "string", "title": "Account Name", - "order": 0 + "order": 1 }, "role": { "description": "The role you created for Airbyte to access Snowflake.", "examples": ["AIRBYTE_ROLE"], "type": "string", "title": "Role", - "order": 1 + "order": 2 }, "warehouse": { "description": "The warehouse you created for Airbyte to access data.", "examples": ["AIRBYTE_WAREHOUSE"], "type": "string", "title": "Warehouse", - "order": 2 + "order": 3 }, "database": { "description": "The database you created for Airbyte to access data.", "examples": ["AIRBYTE_DATABASE"], "type": "string", "title": "Database", - "order": 3 + "order": 4 }, "schema": { "description": "The source Snowflake schema tables.", "examples": ["AIRBYTE_SCHEMA"], "type": "string", "title": "Schema", - "order": 4 - }, - "username": { - "description": "The username you created to allow Airbyte to access the database.", - "examples": ["AIRBYTE_USER"], - "type": "string", - "title": "Username", "order": 5 }, - "password": { - "description": "The password associated with the username.", - "type": "string", - "airbyte_secret": true, - "title": "Password", - "order": 6 - }, "jdbc_url_params": { "description": "Additional properties to pass to the JDBC URL string when connecting to the 
database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).", "title": "JDBC URL Params", "type": "string", - "order": 7 + "order": 6 + } + } + }, + "advanced_auth": { + "auth_flow_type": "oauth2.0", + "predicate_key": ["credentials", "auth_type"], + "predicate_value": "OAuth", + "oauth_config_specification": { + "oauth_user_input_from_connector_config_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "host": { + "type": "string", + "path_in_connector_config": ["host"] + } + } + }, + "complete_oauth_output_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "access_token": { + "type": "string", + "path_in_connector_config": ["credentials", "access_token"] + }, + "refresh_token": { + "type": "string", + "path_in_connector_config": ["credentials", "refresh_token"] + } + } + }, + "complete_oauth_server_input_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "client_id": { + "type": "string" + }, + "client_secret": { + "type": "string" + } + } + }, + "complete_oauth_server_output_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "client_id": { + "type": "string", + "path_in_connector_config": ["credentials", "client_id"] + }, + "client_secret": { + "type": "string", + "path_in_connector_config": ["credentials", "client_secret"] + } + } } } } diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeJdbcSourceAcceptanceTest.java index 011c4aad414b3a..9c81721ebc702e 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeJdbcSourceAcceptanceTest.java @@ -4,19 +4,25 @@ package io.airbyte.integrations.io.airbyte.integration_tests.sources; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableSet; import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; import io.airbyte.integrations.source.jdbc.test.JdbcSourceAcceptanceTest; import io.airbyte.integrations.source.snowflake.SnowflakeSource; +import io.airbyte.protocol.models.AirbyteConnectionStatus; +import io.airbyte.protocol.models.AirbyteConnectionStatus.Status; import java.math.BigDecimal; import java.nio.file.Path; import java.sql.JDBCType; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; class SnowflakeJdbcSourceAcceptanceTest extends JdbcSourceAcceptanceTest { @@ -26,10 +32,6 @@ class SnowflakeJdbcSourceAcceptanceTest extends JdbcSourceAcceptanceTest { static void init() { snConfig = Jsons .deserialize(IOs.readFile(Path.of("secrets/config.json"))); - } - - @BeforeEach - public void setup() throws Exception { // due to case sensitiveness in 
SnowflakeDB SCHEMA_NAME = "JDBC_INTEGRATION_TEST1"; SCHEMA_NAME2 = "JDBC_INTEGRATION_TEST2"; @@ -49,7 +51,10 @@ public void setup() throws Exception { ID_VALUE_3 = new BigDecimal(3); ID_VALUE_4 = new BigDecimal(4); ID_VALUE_5 = new BigDecimal(5); + } + @BeforeEach + public void setup() throws Exception { super.setup(); } @@ -79,4 +84,11 @@ public AbstractJdbcSource getJdbcSource() { return new SnowflakeSource(); } + @Test + void testCheckFailure() throws Exception { + ((ObjectNode) config.get("credentials")).put("password", "fake"); + final AirbyteConnectionStatus actual = source.check(config); + assertEquals(Status.FAILED, actual.getStatus()); + } + } diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAcceptanceTest.java index 9f7047ea306cb6..9b676083f03e90 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAcceptanceTest.java @@ -4,7 +4,10 @@ package io.airbyte.integrations.io.airbyte.integration_tests.sources; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.Lists; import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; @@ -26,6 +29,7 @@ import java.util.HashMap; import java.util.Map; import org.apache.commons.lang3.RandomStringUtils; +import org.junit.jupiter.api.Test; public class SnowflakeSourceAcceptanceTest extends SourceAcceptanceTest { @@ -35,8 +39,8 @@ public class SnowflakeSourceAcceptanceTest extends SourceAcceptanceTest { private static final String STREAM_NAME2 = "ID_AND_NAME2"; // config which refers to the schema that the test is being run in. - private JsonNode config; - private JdbcDatabase database; + protected JsonNode config; + protected JdbcDatabase database; @Override protected String getImageName() { @@ -90,17 +94,7 @@ protected JsonNode getState() { // for each test we create a new schema in the database. run the test in there and then remove it. 
@Override protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - config = Jsons.clone(getStaticConfig()); - database = Databases.createJdbcDatabase( - config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:snowflake://%s/", - config.get("host").asText()), - SnowflakeSource.DRIVER_CLASS, - Map.of("role", config.get("role").asText(), - "warehouse", config.get("warehouse").asText(), - "database", config.get("database").asText())); - + database = setupDataBase(); final String createSchemaQuery = String.format("CREATE SCHEMA IF NOT EXISTS %s", SCHEMA_NAME); final String createTableQuery1 = String .format("CREATE OR REPLACE TABLE %s.%s (ID INTEGER, NAME VARCHAR(200))", SCHEMA_NAME, @@ -130,4 +124,30 @@ protected void tearDown(final TestDestinationEnv testEnv) throws Exception { database.close(); } + protected JdbcDatabase setupDataBase() { + config = Jsons.clone(getStaticConfig()); + return Databases.createJdbcDatabase( + config.get("credentials").get("username").asText(), + config.get("credentials").get("password").asText(), + String.format("jdbc:snowflake://%s/", + config.get("host").asText()), + SnowflakeSource.DRIVER_CLASS, + Map.of("role", config.get("role").asText(), + "warehouse", config.get("warehouse").asText(), + "database", config.get("database").asText())); + } + + @Test + public void testBackwardCompatibilityAfterAddingOAuth() throws Exception { + final JsonNode deprecatedStyleConfig = Jsons.clone(config); + final JsonNode password = deprecatedStyleConfig.get("credentials").get("password"); + final JsonNode username = deprecatedStyleConfig.get("credentials").get("username"); + + ((ObjectNode) deprecatedStyleConfig).remove("credentials"); + ((ObjectNode) deprecatedStyleConfig).set("password", password); + ((ObjectNode) deprecatedStyleConfig).set("username", username); + + assertEquals("SUCCEEDED", runCheckAndGetStatusAsString(deprecatedStyleConfig).toUpperCase()); + } + } diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAuthAcceptanceTest.java b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAuthAcceptanceTest.java new file mode 100644 index 00000000000000..bdcc57e9e08c00 --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAuthAcceptanceTest.java @@ -0,0 +1,94 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
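+ * + * Variant of the acceptance test that authenticates via OAuth using the credentials in secrets/config_auth.json instead of username/password.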
+ */ + +package io.airbyte.integrations.io.airbyte.integration_tests.sources; + +import com.fasterxml.jackson.databind.JsonNode; +import com.zaxxer.hikari.HikariDataSource; +import io.airbyte.commons.io.IOs; +import io.airbyte.commons.json.Jsons; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.db.jdbc.JdbcUtils; +import io.airbyte.db.jdbc.StreamingJdbcDatabase; +import io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils; +import io.airbyte.integrations.source.snowflake.SnowflakeJdbcStreamingQueryConfiguration; +import java.io.IOException; +import java.nio.file.Path; +import java.util.Properties; +import javax.sql.DataSource; + +public class SnowflakeSourceAuthAcceptanceTest extends SnowflakeSourceAcceptanceTest { + + @Override + protected JdbcDatabase setupDataBase() { + config = getStaticConfig(); + final DataSource dataSource = createDataSource(getStaticConfig()); + return new StreamingJdbcDatabase(dataSource, + JdbcUtils.getDefaultSourceOperations(), + new SnowflakeJdbcStreamingQueryConfiguration()); + } + + private HikariDataSource createDataSource(final JsonNode config) { + HikariDataSource dataSource = new HikariDataSource(); + Properties properties = new Properties(); + + final StringBuilder jdbcUrl = new StringBuilder( + String.format("jdbc:snowflake://%s/?", config.get("host").asText())); + jdbcUrl.append(String.format( + "role=%s&warehouse=%s&database=%s&schema=%s&JDBC_QUERY_RESULT_FORMAT=%s&CLIENT_SESSION_KEEP_ALIVE=%s", + config.get("role").asText(), + config.get("warehouse").asText(), + config.get("database").asText(), + config.get("schema").asText(), + // Needed for JDK17 - see + // https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow + "JSON", + true)); + if (config.has("jdbc_url_params")) { + jdbcUrl.append(config.get("jdbc_url_params").asText()); + } + + var credentials = config.get("credentials"); + try { + properties.setProperty("client_id", credentials.get("client_id").asText()); + properties.setProperty("client_secret", credentials.get("client_secret").asText()); + properties.setProperty("refresh_token", credentials.get("refresh_token").asText()); + properties.setProperty("host", config.get("host").asText()); + var accessToken = SnowflakeDataSourceUtils.getAccessTokenUsingRefreshToken( + config.get("host").asText(), credentials.get("client_id").asText(), + credentials.get("client_secret").asText(), credentials.get("refresh_token").asText()); + properties.put("authenticator", "oauth"); + properties.put("token", accessToken); + } catch (IOException e) { + throw new RuntimeException(e); + } + + properties.put("warehouse", config.get("warehouse").asText()); + properties.put("account", config.get("host").asText()); + properties.put("role", config.get("role").asText()); + // allows queries to contain any number of statements + properties.put("MULTI_STATEMENT_COUNT", "0"); + // https://docs.snowflake.com/en/user-guide/jdbc-parameters.html#application + // identify airbyte traffic to snowflake to enable partnership & optimization opportunities + properties.put("dataSource.application", "airbyte"); + // Needed for JDK17 - see + // https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow + properties.put("JDBC_QUERY_RESULT_FORMAT", "JSON"); + + dataSource.setDriverClassName("net.snowflake.client.jdbc.SnowflakeDriver"); + dataSource.setJdbcUrl(jdbcUrl.toString()); + dataSource.setDataSourceProperties(properties); + 
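+ // the OAuth token and connection settings reach the Snowflake JDBC driver via Hikari's dataSourceProperties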
return dataSource; + } + + JsonNode getStaticConfig() { + return Jsons + .deserialize(IOs.readFile(Path.of("secrets/config_auth.json"))); + } + + @Override + public void testBackwardCompatibilityAfterAddingOAuth() throws Exception { + // this test case is not valid for OAuth method + } +} diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceDatatypeTest.java b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceDatatypeTest.java index bf739e15246b0d..df49c9884d3fa2 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceDatatypeTest.java @@ -51,8 +51,8 @@ protected Database setupDatabase() throws Exception { private Database getDatabase() { return Databases.createDatabase( - config.get("username").asText(), - config.get("password").asText(), + config.get("credentials").get("username").asText(), + config.get("credentials").get("password").asText(), String.format("jdbc:snowflake://%s/", config.get("host").asText()), SnowflakeSource.DRIVER_CLASS, diff --git a/airbyte-integrations/connectors/source-snowflake/src/test/java/io/airbyte/integrations/source/snowflake/SnowflakeDataSourceUtilsTest.java b/airbyte-integrations/connectors/source-snowflake/src/test/java/io/airbyte/integrations/source/snowflake/SnowflakeDataSourceUtilsTest.java new file mode 100644 index 00000000000000..bf7080d82b0a66 --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/src/test/java/io/airbyte/integrations/source/snowflake/SnowflakeDataSourceUtilsTest.java @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
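+ * + * Unit tests covering JDBC URL construction, with and without extra jdbc_url_params.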
+ */ + +package io.airbyte.integrations.source.snowflake; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.commons.json.Jsons; +import org.junit.jupiter.api.Test; + +class SnowflakeDataSourceUtilsTest { + + private final String config = """ + { + "host": "host", + "role": "role", + "schema": "SOURCE_SCHEMA", + "database": "DATABASE", + "warehouse": "WAREHOUSE", + "credentials": { + "auth_type": "OAuth", + "client_id": "someid", + "access_token": "**********", + "client_secret": "clientSecret", + "refresh_token": "token" + } + } + """; + private final String expectedJdbcUrl = + "jdbc:snowflake://host/?role=role&warehouse=WAREHOUSE&database=DATABASE&schema=SOURCE_SCHEMA&JDBC_QUERY_RESULT_FORMAT=JSON&CLIENT_SESSION_KEEP_ALIVE=true"; + + @Test + void testBuildJDBCUrl() { + JsonNode expectedConfig = Jsons.deserialize(config); + + String jdbcURL = SnowflakeDataSourceUtils.buildJDBCUrl(expectedConfig); + + assertEquals(expectedJdbcUrl, jdbcURL); + } + + @Test + void testBuildJDBCUrlWithParams() { + JsonNode expectedConfig = Jsons.deserialize(config); + String params = "someParameter1&param2=someParameter2"; + ((ObjectNode) expectedConfig).put("jdbc_url_params", params); + + String jdbcURL = SnowflakeDataSourceUtils.buildJDBCUrl(expectedConfig); + + assertEquals(expectedJdbcUrl + "&" + params, jdbcURL); + } + +} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java index 9fb4056af3f5a8..06723d5b0ef31d 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java @@ -63,6 +63,7 @@ public OAuthImplementationFactory(final ConfigRepository configRepository, final .put("airbyte/source-shopify", new ShopifyOAuthFlow(configRepository, httpClient)) .put("airbyte/source-tiktok-marketing", new TikTokMarketingOAuthFlow(configRepository, httpClient)) .put("airbyte/destination-snowflake", new DestinationSnowflakeOAuthFlow(configRepository, httpClient)) + .put("airbyte/source-snowflake", new SourceSnowflakeOAuthFlow(configRepository, httpClient)) .build(); } diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SourceSnowflakeOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SourceSnowflakeOAuthFlow.java new file mode 100644 index 00000000000000..d9c976cf5ea370 --- /dev/null +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SourceSnowflakeOAuthFlow.java @@ -0,0 +1,144 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
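+ * + * OAuth2 flow for the Snowflake source: sends the user to https://<account host>/oauth/authorize for consent and exchanges the returned code at /oauth/token-request using HTTP Basic client authentication.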
+ */
+
+package io.airbyte.oauth.flows;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableMap;
+import io.airbyte.commons.json.Jsons;
+import io.airbyte.config.persistence.ConfigRepository;
+import io.airbyte.oauth.BaseOAuth2Flow;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+import java.nio.charset.StandardCharsets;
+import java.util.Base64;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+import java.util.function.Supplier;
+import org.apache.http.client.utils.URIBuilder;
+
+public class SourceSnowflakeOAuthFlow extends BaseOAuth2Flow {
+
+  private static final String AUTHORIZE_URL = "https://%s/oauth/authorize";
+  private static final String ACCESS_TOKEN_URL = "https://%s/oauth/token-request";
+
+  public SourceSnowflakeOAuthFlow(ConfigRepository configRepository, HttpClient httpClient) {
+    super(configRepository, httpClient);
+  }
+
+  @VisibleForTesting
+  public SourceSnowflakeOAuthFlow(ConfigRepository configRepository, HttpClient httpClient, final Supplier<String> stateSupplier) {
+    super(configRepository, httpClient, stateSupplier);
+  }
+
+  @Override
+  protected String formatConsentUrl(UUID definitionId,
+                                    String clientId,
+                                    String redirectUrl,
+                                    JsonNode inputOAuthConfiguration)
+      throws IOException {
+    try {
+      return new URIBuilder(String.format(AUTHORIZE_URL, extractUrl(inputOAuthConfiguration)))
+          .addParameter("client_id", clientId)
+          .addParameter("redirect_uri", redirectUrl)
+          .addParameter("response_type", "code")
+          .addParameter("state", getState())
+          .build().toString();
+    } catch (final URISyntaxException e) {
+      throw new IOException("Failed to format Consent URL for OAuth flow", e);
+    }
+  }
+
+  @Override
+  protected String getAccessTokenUrl(JsonNode inputOAuthConfiguration) {
+    return String.format(ACCESS_TOKEN_URL, extractUrl(inputOAuthConfiguration));
+  }
+
+  @Override
+  protected String extractCodeParameter(Map<String, Object> queryParams) throws IOException {
+    return super.extractCodeParameter(queryParams);
+  }
+
+  @Override
+  protected Map<String, String> getAccessTokenQueryParameters(String clientId,
+                                                              String clientSecret,
+                                                              String authCode,
+                                                              String redirectUrl) {
+    return ImmutableMap.<String, String>builder()
+        // required
+        .put("grant_type", "authorization_code")
+        .put("code", authCode)
+        .put("redirect_uri", redirectUrl)
+        .build();
+  }
+
+  @Override
+  protected Map<String, Object> completeOAuthFlow(final String clientId,
+                                                  final String clientSecret,
+                                                  final String authCode,
+                                                  final String redirectUrl,
+                                                  final JsonNode inputOAuthConfiguration,
+                                                  final JsonNode oAuthParamConfig)
+      throws IOException {
+    final var accessTokenUrl = getAccessTokenUrl(inputOAuthConfiguration);
+    final byte[] authorization = Base64.getEncoder()
+        .encode((clientId + ":" + clientSecret).getBytes(StandardCharsets.UTF_8));
+    final HttpRequest request = HttpRequest.newBuilder()
+        .POST(HttpRequest.BodyPublishers
+            .ofString(tokenReqContentType.getConverter().apply(
+                getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl))))
+        .uri(URI.create(accessTokenUrl))
+        .header("Content-Type", tokenReqContentType.getContentType())
+        .header("Accept", "application/json")
+        .header("Authorization", "Basic " + new String(authorization, StandardCharsets.UTF_8))
+        .build();
+    try {
+      final HttpResponse<String> response = httpClient.send(request,
+          HttpResponse.BodyHandlers.ofString());
+
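+      // Parse the JSON body returned by Snowflake's token-request endpoint;
+      // the extractOAuthOutput override below reads access_token, refresh_token
+      // and username out of it, and fails fast if any of them is missing.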
+      return extractOAuthOutput(Jsons.deserialize(response.body()), accessTokenUrl);
+    } catch (final InterruptedException e) {
+      throw new IOException("Failed to complete OAuth flow", e);
+    }
+  }
+
+  @Override
+  protected Map<String, Object> extractOAuthOutput(JsonNode data, String accessTokenUrl)
+      throws IOException {
+    final Map<String, Object> result = new HashMap<>();
+    // access_token is valid for only 10 minutes
+    if (data.has("access_token")) {
+      result.put("access_token", data.get("access_token").asText());
+    } else {
+      throw new IOException(String.format("Missing 'access_token' in query params from %s",
+          accessTokenUrl));
+    }
+
+    if (data.has("refresh_token")) {
+      result.put("refresh_token", data.get("refresh_token").asText());
+    } else {
+      throw new IOException(String.format("Missing 'refresh_token' in query params from %s",
+          accessTokenUrl));
+    }
+    if (data.has("username")) {
+      result.put("username", data.get("username").asText());
+    } else {
+      throw new IOException(String.format("Missing 'username' in query params from %s",
+          accessTokenUrl));
+    }
+    return result;
+  }
+
+  private String extractUrl(JsonNode inputOAuthConfiguration) {
+    var url = inputOAuthConfiguration.get("host");
+    return url == null ? "snowflakecomputing.com" : url.asText();
+  }
+
+}
diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnowflakeOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnowflakeOAuthFlowTest.java
new file mode 100644
index 00000000000000..e982170807f00d
--- /dev/null
+++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnowflakeOAuthFlowTest.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.oauth.flows;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.google.common.collect.ImmutableMap;
+import io.airbyte.commons.json.Jsons;
+import io.airbyte.oauth.BaseOAuthFlow;
+import io.airbyte.oauth.MoreOAuthParameters;
+import java.util.Map;
+import org.junit.jupiter.api.Test;
+
+public class SnowflakeOAuthFlowTest extends BaseOAuthFlowTest {
+
+  @Override
+  protected BaseOAuthFlow getOAuthFlow() {
+    return new SourceSnowflakeOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState);
+  }
+
+  @Override
+  protected String getExpectedConsentUrl() {
+    return "https://account.aws.snowflakecomputing.com/oauth/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&state=state";
+  }
+
+  @Override
+  protected Map<String, String> getExpectedOutput() {
+    return Map.of(
+        "access_token", "access_token_response",
+        "refresh_token", "refresh_token_response",
+        "username", "username");
+  }
+
+  @Override
+  protected JsonNode getCompleteOAuthOutputSpecification() {
+    return getJsonSchema(Map.of("access_token", Map.of("type", "string"), "refresh_token", Map.of("type", "string")));
+  }
+
+  @Override
+  protected Map<String, String> getExpectedFilteredOutput() {
+    return Map.of(
+        "access_token", "access_token_response",
+        "refresh_token", "refresh_token_response",
+        "client_id", MoreOAuthParameters.SECRET_MASK);
+  }
+
+  protected JsonNode getOAuthParamConfig() {
+    return Jsons.jsonNode(ImmutableMap.builder()
+        .put("client_id", "test_client_id")
+        .put("client_secret", "test_client_secret")
+        .build());
+  }
+
+  @Override
+  protected JsonNode getInputOAuthConfiguration() {
+    return Jsons.jsonNode(ImmutableMap.builder()
+        .put("host", "account.aws.snowflakecomputing.com")
+        .build());
+  }
+
+  protected JsonNode getUserInputFromConnectorConfigSpecification() {
+    return getJsonSchema(Map.of("host", Map.of("type",
"string"))); + } + + @Test + @Override + public void testGetSourceConsentUrlEmptyOAuthSpec() {} + + @Test + @Override + public void testGetDestinationConsentUrlEmptyOAuthSpec() {} + + @Test + @Override + public void testDeprecatedCompleteDestinationOAuth() {} + + @Test + @Override + public void testDeprecatedCompleteSourceOAuth() {} + +} diff --git a/docs/integrations/sources/snowflake.md b/docs/integrations/sources/snowflake.md index 0c9a15e483baf0..f500081c8b2a63 100644 --- a/docs/integrations/sources/snowflake.md +++ b/docs/integrations/sources/snowflake.md @@ -72,10 +72,38 @@ You can limit this grant down to specific schemas instead of the whole database. Your database user should now be ready for use with Airbyte. +###Authentication +#### There are 2 way ways of oauth supported: login\pass and oauth2. + +### Login and Password +| Field | Description | +|---|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [Host](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html) | The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com). Example: `accountname.us-east-2.aws.snowflakecomputing.com` | +| [Role](https://docs.snowflake.com/en/user-guide/security-access-control-overview.html#roles) | The role you created in Step 1 for Airbyte to access Snowflake. Example: `AIRBYTE_ROLE` | +| [Warehouse](https://docs.snowflake.com/en/user-guide/warehouses-overview.html#overview-of-warehouses) | The warehouse you created in Step 1 for Airbyte to sync data into. Example: `AIRBYTE_WAREHOUSE` | +| [Database](https://docs.snowflake.com/en/sql-reference/ddl-database.html#database-schema-share-ddl) | The database you created in Step 1 for Airbyte to sync data into. Example: `AIRBYTE_DATABASE` | +| [Schema](https://docs.snowflake.com/en/sql-reference/ddl-database.html#database-schema-share-ddl) | The default schema used as the target schema for all statements issued from the connection that do not explicitly specify a schema name. | +| Username | The username you created in Step 2 to allow Airbyte to access the database. Example: `AIRBYTE_USER` | +| Password | The password associated with the username. | +| [JDBC URL Params](https://docs.snowflake.com/en/user-guide/jdbc-parameters.html) (Optional) | Additional properties to pass to the JDBC URL string when connecting to the database formatted as `key=value` pairs separated by the symbol `&`. Example: `key1=value1&key2=value2&key3=value3` | + + +### OAuth 2.0 +Field | Description | +|---|---| +| [Host](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html) | The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com). Example: `accountname.us-east-2.aws.snowflakecomputing.com` | +| [Role](https://docs.snowflake.com/en/user-guide/security-access-control-overview.html#roles) | The role you created in Step 1 for Airbyte to access Snowflake. Example: `AIRBYTE_ROLE` | +| [Warehouse](https://docs.snowflake.com/en/user-guide/warehouses-overview.html#overview-of-warehouses) | The warehouse you created in Step 1 for Airbyte to sync data into. Example: `AIRBYTE_WAREHOUSE` | +| [Database](https://docs.snowflake.com/en/sql-reference/ddl-database.html#database-schema-share-ddl) | The database you created in Step 1 for Airbyte to sync data into. 
Example: `AIRBYTE_DATABASE` | +| [Schema](https://docs.snowflake.com/en/sql-reference/ddl-database.html#database-schema-share-ddl) | The default schema used as the target schema for all statements issued from the connection that do not explicitly specify a schema name. | +| OAuth2 | The Login name and password to obtain auth token. | +| [JDBC URL Params](https://docs.snowflake.com/en/user-guide/jdbc-parameters.html) (Optional) | Additional properties to pass to the JDBC URL string when connecting to the database formatted as `key=value` pairs separated by the symbol `&`. Example: `key1=value1&key2=value2&key3=value3` | + ## Changelog | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.11 | 2022-04-27 | [10953](https://github.com/airbytehq/airbyte/pull/10953) | Implement OAuth flow | | 0.1.9 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. Now connectors work with both formats | | 0.1.8 | 2022-02-18 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Updated timestamp transformation with microseconds | | 0.1.7 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | From c0c27f407cf04d31189431657c82e46328a1c53f Mon Sep 17 00:00:00 2001 From: Eric Date: Wed, 27 Apr 2022 19:00:40 +0200 Subject: [PATCH 009/152] Fix typo on `DATABASE_URL` comment (#12404) --- .env | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.env b/.env index 056bc9558a3898..428dc71fdc9b4a 100644 --- a/.env +++ b/.env @@ -40,7 +40,7 @@ DATABASE_PASSWORD=docker DATABASE_HOST=db DATABASE_PORT=5432 DATABASE_DB=airbyte -# translate manually DATABASE_URL=jdbc:postgresql://${DATABASE_HOST}:${DATABASE_PORT/${DATABASE_DB} (do not include the username or password here) +# translate manually DATABASE_URL=jdbc:postgresql://${DATABASE_HOST}:${DATABASE_PORT}/${DATABASE_DB} (do not include the username or password here) DATABASE_URL=jdbc:postgresql://db:5432/airbyte JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION=0.29.15.001 From d612b8abe5159f62f1416fcba5caa6011c424787 Mon Sep 17 00:00:00 2001 From: midavadim Date: Wed, 27 Apr 2022 20:33:15 +0300 Subject: [PATCH 010/152] :tada: Source tiktok marketing: fixed specs, updated docs (#12380) * specs: changed order for input params, marked required params * updated docs * updated specs test * updated connector version * auto-bump connector version * updated seed files Co-authored-by: Octavia Squidington III --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 54 +- .../source-tiktok-marketing/Dockerfile | 2 +- .../integration_tests/spec.json | 36 +- .../source_tiktok_marketing/spec.py | 16 +- docs/integrations/sources/tiktok-marketing.md | 541 ++++++++++++++++-- 6 files changed, 554 insertions(+), 97 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 3ec994c8549164..746412679cfb8f 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -799,7 +799,7 @@ - name: TikTok Marketing sourceDefinitionId: 4bfac00d-ce15-44ff-95b9-9e3c3e8fbd35 dockerRepository: airbyte/source-tiktok-marketing - dockerImageTag: 0.1.6 + dockerImageTag: 0.1.7 documentationUrl: https://docs.airbyte.io/integrations/sources/tiktok-marketing icon: tiktok.svg 
sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 333e61f9bada49..631af548711a59 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -8636,7 +8636,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-tiktok-marketing:0.1.6" +- dockerImage: "airbyte/source-tiktok-marketing:0.1.7" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/tiktok-marketing" changelogUrl: "https://docs.airbyte.io/integrations/sources/tiktok-marketing" @@ -8644,31 +8644,10 @@ title: "TikTok Marketing Source Spec" type: "object" properties: - start_date: - title: "Start Date" - description: "The Start Date in format: YYYY-MM-DD. Any data before this\ - \ date will not be replicated. If this parameter is not set, all data\ - \ will be replicated." - default: "2016-09-01" - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" - order: 0 - type: "string" - report_granularity: - title: "Report Granularity" - description: "Which time granularity should be grouped by; for LIFETIME\ - \ there will be no grouping. This option is used for reports' streams\ - \ only." - default: "DAY" - enum: - - "LIFETIME" - - "DAY" - - "HOUR" - order: 1 - type: "string" credentials: - title: "Authorization Method" + title: "Authentication *" default: {} - order: 3 + order: 0 type: "object" oneOf: - title: "OAuth2.0" @@ -8678,8 +8657,6 @@ title: "Auth Type" const: "oauth2.0" order: 0 - enum: - - "oauth2.0" type: "string" app_id: title: "App ID" @@ -8707,8 +8684,6 @@ title: "Auth Type" const: "prod_access_token" order: 0 - enum: - - "prod_access_token" type: "string" app_id: title: "App ID" @@ -8735,8 +8710,6 @@ title: "Auth Type" const: "sandbox_access_token" order: 0 - enum: - - "sandbox_access_token" type: "string" advertiser_id: title: "Advertiser ID" @@ -8751,6 +8724,27 @@ required: - "advertiser_id" - "access_token" + start_date: + title: "Start Date *" + description: "The Start Date in format: YYYY-MM-DD. Any data before this\ + \ date will not be replicated. If this parameter is not set, all data\ + \ will be replicated." + default: "2016-09-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 1 + type: "string" + report_granularity: + title: "Report Granularity *" + description: "Which time granularity should be grouped by; for LIFETIME\ + \ there will be no grouping. This option is used for reports' streams\ + \ only." 
+ default: "DAY" + enum: + - "LIFETIME" + - "DAY" + - "HOUR" + order: 2 + type: "string" supportsIncremental: true supportsNormalization: false supportsDBT: false diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile b/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile index 690dfa479c1417..5524acd9717b7c 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile +++ b/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile @@ -32,5 +32,5 @@ COPY source_tiktok_marketing ./source_tiktok_marketing ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.6 +LABEL io.airbyte.version=0.1.7 LABEL io.airbyte.name=airbyte/source-tiktok-marketing diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json b/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json index bd914e8e5d62a8..da6cad26a536c7 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json @@ -5,26 +5,10 @@ "title": "TikTok Marketing Source Spec", "type": "object", "properties": { - "start_date": { - "title": "Start Date", - "description": "The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.", - "default": "2016-09-01", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$", - "order": 0, - "type": "string" - }, - "report_granularity": { - "title": "Report Granularity", - "description": "Which time granularity should be grouped by; for LIFETIME there will be no grouping. This option is used for reports' streams only.", - "default": "DAY", - "enum": ["LIFETIME", "DAY", "HOUR"], - "order": 1, - "type": "string" - }, "credentials": { - "title": "Authorization Method", + "title": "Authentication *", "default": {}, - "order": 3, + "order": 0, "type": "object", "oneOf": [ { @@ -113,6 +97,22 @@ "required": ["advertiser_id", "access_token"] } ] + }, + "start_date": { + "title": "Start Date *", + "description": "The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.", + "default": "2016-09-01", + "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$", + "order": 1, + "type": "string" + }, + "report_granularity": { + "title": "Report Granularity *", + "description": "Which time granularity should be grouped by; for LIFETIME there will be no grouping. 
This option is used for reports' streams only.", + "default": "DAY", + "enum": ["LIFETIME", "DAY", "HOUR"], + "order": 2, + "type": "string" } } }, diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/spec.py b/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/spec.py index 79019570f0ce54..f33e829befff06 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/spec.py +++ b/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/spec.py @@ -57,26 +57,26 @@ class SourceTiktokMarketingSpec(BaseModel): class Config: title = "TikTok Marketing Source Spec" + credentials: Union[OauthCredSpec, ProductionEnvSpec, SandboxEnvSpec] = Field( + title="Authentication *", order=0, default={}, type="object" + ) + start_date: str = Field( - title="Start Date", + title="Start Date *", default=DEFAULT_START_DATE, pattern="^[0-9]{4}-[0-9]{2}-[0-9]{2}$", description="The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. " "If this parameter is not set, all data will be replicated.", - order=0, + order=1, ) report_granularity: str = Field( - title="Report Granularity", + title="Report Granularity *", description="Which time granularity should be grouped by; for LIFETIME there will be no grouping. " "This option is used for reports' streams only.", default=ReportGranularity.default().value, enum=[g.value for g in ReportGranularity], - order=1, - ) - - credentials: Union[OauthCredSpec, ProductionEnvSpec, SandboxEnvSpec] = Field( - title="Authorization Method", order=3, default={}, type="object" + order=2, ) @classmethod diff --git a/docs/integrations/sources/tiktok-marketing.md b/docs/integrations/sources/tiktok-marketing.md index 0642ece8c5a44f..9ae8455bfcad27 100644 --- a/docs/integrations/sources/tiktok-marketing.md +++ b/docs/integrations/sources/tiktok-marketing.md @@ -1,64 +1,527 @@ # TikTok Marketing -## Overview +This page guides you through the process of setting up the TikTok Marketing source connector. -The [TikTok For Business Marketing API](https://ads.tiktok.com/marketing_api/homepage?rid=uvtbok1h19) allows you to directly interact with the TikTok Ads Manager platform for automated ad management and analysis. +## Prerequisites -The TikTok Marketing source supports both Full Refresh and Incremental syncs. You can choose if this connector will copy only the new or updated data, or all rows in the tables and columns you set up for replication, every time a sync is run. +For Production environment: +* Access token +* Secret +* App ID -This Source Connector is based on a [Airbyte CDK](https://docs.airbyte.io/connector-development/cdk-python). 
+For Sandbox environment: +* Access token +* Advertiser ID -### Streams information +* Start date +* Report Granularity (LIFETIME, DAY, HOUR) -| Stream | Environment | Granularities | Key | Incremental | Schema | -|:----------------------------------|--------------|-------------------|-------------|:---------------|-----------------------------------------------------------------------------------------------| -| Advertisers | Prod,Sandbox | LIFETIME,DAY,HOUR | id | No | [Link](https://business-api.tiktok.com/marketing_api/docs?id=1708503202263042) | -| AdGroups | Prod,Sandbox | LIFETIME,DAY,HOUR | adgroup_id | Yes (DAY,HOUR) | [Link](https://business-api.tiktok.com/marketing_api/docs?id=1708503489590273) | -| Ads | Prod,Sandbox | LIFETIME,DAY,HOUR | ad_id | Yes (DAY,HOUR) | [Link](https://business-api.tiktok.com/marketing_api/docs?id=1708572923161602) | -| Campaigns | Prod,Sandbox | LIFETIME,DAY,HOUR | campaign_id | Yes (DAY,HOUR) | [Link](https://business-api.tiktok.com/marketing_api/docs?id=1708582970809346) | -| AdsReports | Prod,Sandbox | LIFETIME,DAY,HOUR | None | Yes (DAY,HOUR) | [BasicReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957200780290) | -| AdvertisersReports | Prod | LIFETIME,DAY,HOUR | None | Yes (DAY,HOUR) | [BasicReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957200780290) | -| AdGroupsReports | Prod,Sandbox | LIFETIME,DAY,HOUR | None | Yes (DAY,HOUR) | [BasicReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957200780290) | -| CampaignsReports | Prod,Sandbox | LIFETIME,DAY,HOUR | None | Yes (DAY,HOUR) | [BasicReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957200780290) | -| AdvertisersAudienceReports | Prod | LIFETIME,DAY,HOUR | None | Yes (DAY,HOUR) | [AudienceReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957217727489) | -| AdGroupAudienceReports | Prod,Sandbox | DAY,HOUR | None | Yes (DAY,HOUR) | [AudienceReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957217727489) | -| AdsAudienceReports | Prod,Sandbox | DAY,HOUR | None | Yes (DAY,HOUR) | [AudienceReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957217727489) | -| CampaignsAudienceReportsByCountry | Prod,Sandbox | DAY,HOUR | None | Yes (DAY,HOUR) | [AudienceReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957217727489) | +## Step 1: Set up TikTok -If there are more endpoints you'd like Airbyte to support, please [create an issue.](https://github.com/airbytehq/airbyte/issues/new/choose) +1. Create a TikTok For Business account: [Link](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1702715936951297) +2. Create developer application: [Link](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1702716474845185) +3. For sandbox environment: create a Sandbox Ad Account [Link](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1701890920013825) -### Features +## Step 2: Set up the source connector in Airbyte -| Feature | Supported? | -| :--- | :--- | -| Full Refresh Sync | Yes | -| Incremental - Append Sync | Yes | -| SSL connection | Yes | -| Namespaces | No | +**For Airbyte Cloud:** -### Performance considerations +1. [Log into your Airbyte Cloud](https://cloud.airbyte.io/workspaces) account. +2. In the left navigation bar, click **Sources**. In the top-right corner, click **+ new source**. +3. 
On the source setup page, select **Tiktok Marketing** from the Source type dropdown and enter a name for this connector.
+4. Select `OAuth2.0` Authorization method, then click `Authenticate your account`.
+5. Log in and authorize access to the TikTok account.
+6. Choose the required Start date and report granularity.
+7. Click `Set up source`.
+
+**For Airbyte OSS:**
+
+1. Go to your local Airbyte page.
+2. In the left navigation bar, click **Sources**. In the top-right corner, click **+ new source**.
+3. On the Set up the source page, enter the name for the connector and select **Tiktok Marketing** from the Source type dropdown.
+4. Select `Production Access Token` or `Sandbox Access Token` Authorization method, then copy and paste the info from Step 1.
+5. Choose the required Start date and report granularity.
+6. Click `Set up source`.
+
+## Supported streams and sync modes
+
+| Stream                            | Environment  | Granularities     | Key         | Incremental    |
+|:----------------------------------|--------------|-------------------|-------------|:---------------|
+| Advertisers                       | Prod,Sandbox | LIFETIME,DAY,HOUR | id          | No             |
+| AdGroups                          | Prod,Sandbox | LIFETIME,DAY,HOUR | adgroup_id  | Yes (DAY,HOUR) |
+| Ads                               | Prod,Sandbox | LIFETIME,DAY,HOUR | ad_id       | Yes (DAY,HOUR) |
+| Campaigns                         | Prod,Sandbox | LIFETIME,DAY,HOUR | campaign_id | Yes (DAY,HOUR) |
+| AdsReports                        | Prod,Sandbox | LIFETIME,DAY,HOUR | None        | Yes (DAY,HOUR) |
+| AdvertisersReports                | Prod         | LIFETIME,DAY,HOUR | None        | Yes (DAY,HOUR) |
+| AdGroupsReports                   | Prod,Sandbox | LIFETIME,DAY,HOUR | None        | Yes (DAY,HOUR) |
+| CampaignsReports                  | Prod,Sandbox | LIFETIME,DAY,HOUR | None        | Yes (DAY,HOUR) |
+| AdvertisersAudienceReports        | Prod         | LIFETIME,DAY,HOUR | None        | Yes (DAY,HOUR) |
+| AdGroupAudienceReports            | Prod,Sandbox | DAY,HOUR          | None        | Yes (DAY,HOUR) |
+| AdsAudienceReports                | Prod,Sandbox | DAY,HOUR          | None        | Yes (DAY,HOUR) |
+| CampaignsAudienceReportsByCountry | Prod,Sandbox | DAY,HOUR          | None        | Yes (DAY,HOUR) |
+
+**[Advertisers](https://ads.tiktok.com/marketing_api/docs?id=1708503202263042) Stream**
+```
+{
+  "contacter": "Ai***te",
+  "phonenumber": "+13*****5753",
+  "license_no": "",
+  "promotion_center_city": null,
+  "balance": 10,
+  "license_url": null,
+  "timezone": "Etc/GMT+8",
+  "reason": "",
+  "telephone": "+14*****6785",
+  "id": 7002238017842757633,
+  "language": "en",
+  "country": "US",
+  "role": "ROLE_ADVERTISER",
+  "license_province": null,
+  "display_timezone": "America/Los_Angeles",
+  "email": "i***************@**********",
+  "license_city": null,
+  "industry": "291905",
+  "create_time": 1630335591,
+  "promotion_center_province": null,
+  "address": "350 29th avenue, San Francisco",
+  "currency": "USD",
+  "promotion_area": "0",
+  "status": "STATUS_ENABLE",
+  "description": "https://",
+  "brand": null,
+  "name": "Airbyte0830",
+  "company": "Airbyte"
+}
+```
+
-### Setup guide
-
-Please read [How to get your AppID, Secret and Access
Token](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1701890909484033) or [How to create a SandBox Environment](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1701890920013825) +**[AdGroups](https://ads.tiktok.com/marketing_api/docs?id=1708503489590273) Stream** +``` +{ + "placement_type": "PLACEMENT_TYPE_AUTOMATIC", + "budget": 20, + "budget_mode": "BUDGET_MODE_DAY", + "display_mode": null, + "schedule_infos": null, + "billing_event": "CPC", + "conversion_window": null, + "adgroup_name": "Ad Group20211020010107", + "interest_keywords": [], + "is_comment_disable": 0, + "rf_buy_type": null, + "frequency": null, + "bid_type": "BID_TYPE_NO_BID", + "placement": null, + "bid": 0, + "include_custom_actions": [], + "operation_system": [], + "pixel_id": null, + "dayparting": "111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111", + "app_type": null, + "conversion_id": 0, + "rf_predict_cpr": null, + "deep_bid_type": null, + "scheduled_budget": 0.0, + "adgroup_id": 1714125049901106, + "frequency_schedule": null, + "exclude_custom_actions": [], + "advertiser_id": 7002238017842757633, + "deep_cpabid": 0, + "is_new_structure": true, + "buy_impression": null, + "external_type": "WEBSITE", + "excluded_audience": [], + "deep_external_action": null, + "interest_category_v2": [], + "rf_predict_frequency": null, + "audience": [], + "pacing": "PACING_MODE_SMOOTH", + "brand_safety_partner": null, + "daily_retention_ratio": null, + "optimize_goal": "CLICK", + "enable_search_result": false, + "conversion_bid": 0, + "schedule_end_time": "2021-10-31 09:01:07", + "opt_status": "ENABLE", + "status": "ADGROUP_STATUS_CAMPAIGN_DISABLE", + "app_id": null, + "external_action": null, + "schedule_type": "SCHEDULE_START_END", + "brand_safety": "NO_BRAND_SAFETY", + "campaign_id": 1714125042508817, + "campaign_name": "Website Traffic20211020010104", + "split_test_adgroup_ids": [], + "action_v2": [], + "is_hfss": false, + "keywords": null, + "create_time": "2021-10-20 08:04:05", + "feed_type": null, + "languages": ["en"], + "enable_inventory_filter": false, + "device_price": [], + "location": [6252001], + "schedule_start_time": "2021-10-20 09:01:07", + "skip_learning_phase": 0, + "gender": "GENDER_UNLIMITED", + "creative_material_mode": "CUSTOM", + "app_download_url": null, + "device_models": [], + "automated_targeting": "OFF", + "connection_type": [], + "ios14_quota_type": "UNOCCUPIED", + "modify_time": "2022-03-24 12:06:54", + "category": 0, + "statistic_type": null, + "video_download": "ALLOW_DOWNLOAD", + "age": ["AGE_25_34", "AGE_35_44", "AGE_45_54"], + "buy_reach": null, + "is_share_disable": false +} +``` + +**[Ads](https://ads.tiktok.com/marketing_api/docs?id=1708572923161602) Stream** +``` +{ + "vast_moat": false, + "is_new_structure": true, + "campaign_name": "CampaignVadimTraffic", + "landing_page_urls": null, + "card_id": null, + "adgroup_id": 1728545385226289, + "campaign_id": 1728545382536225, + "status": "AD_STATUS_CAMPAIGN_DISABLE", + "brand_safety_postbid_partner": "UNSET", + "advertiser_id": 7002238017842757633, + "is_aco": false, + "ad_text": "Open-source\ndata integration for modern data teams", + "identity_id": "7080121820963422209", + "display_name": "airbyte", + "open_url": "", + "external_action": 
null, + "playable_url": "", + "create_time": "2022-03-28 12:09:09", + "product_ids": [], + "adgroup_name": "AdGroupVadim", + "fallback_type": "UNSET", + "creative_type": null, + "ad_name": "AdVadim-Optimized Version 3_202203281449_2022-03-28 05:03:44", + "video_id": "v10033g50000c90q1d3c77ub6e96fvo0", + "ad_format": "SINGLE_VIDEO", + "profile_image": "https://p21-ad-sg.ibyteimg.com/large/ad-site-i18n-sg/202203285d0de5c114d0690a462bb6a4", + "open_url_type": "NORMAL", + "click_tracking_url": null, + "page_id": null, + "ad_texts": null, + "landing_page_url": "https://airbyte.com", + "identity_type": "CUSTOMIZED_USER", + "avatar_icon_web_uri": "ad-site-i18n-sg/202203285d0de5c114d0690a462bb6a4", + "app_name": "", + "modify_time": "2022-03-28 21:34:26", + "opt_status": "ENABLE", + "call_to_action_id": "7080120957230238722", + "image_ids": ["v0201/7f371ff6f0764f8b8ef4f37d7b980d50"], + "ad_id": 1728545390695442, + "impression_tracking_url": null, + "is_creative_authorized": false +} +``` + +**[Campaigns](https://ads.tiktok.com/marketing_api/docs?id=1708582970809346) Stream** +``` +{ + "create_time": "2021-10-19 18:18:08", + "campaign_id": 1714073078669329, + "roas_bid": 0.0, + "advertiser_id": 7002238017842757633, + "modify_time": "2022-03-28 12:01:56", + "campaign_type": "REGULAR_CAMPAIGN", + "status": "CAMPAIGN_STATUS_DISABLE", + "objective_type": "TRAFFIC", + "split_test_variable": null, + "opt_status": "DISABLE", + "budget": 50, + "is_new_structure": true, + "deep_bid_type": null, + "campaign_name": "Website Traffic20211019110444", + "budget_mode": "BUDGET_MODE_DAY", + "objective": "LANDING_PAGE" +} +``` + +**AdsReports Stream - [BasicReports](https://ads.tiktok.com/marketing_api/docs?id=1707957200780290)** +``` +{ + "dimensions": { + "ad_id": 1728545390695442, + "stat_time_day": "2022-03-29 00:00:00" + }, + "metrics": { + "real_time_result_rate": 0.93, + "campaign_id": 1728545382536225, + "placement": "Automatic Placement", + "frequency": 1.17, + "cpc": 0.35, + "ctr": 0.93, + "cost_per_result": 0.3509, + "impressions": 6137, + "cost_per_conversion": 0, + "real_time_result": 57, + "adgroup_id": 1728545385226289, + "result_rate": 0.93, + "cost_per_1000_reached": 3.801, + "ad_text": "Open-source\ndata integration for modern data teams", + "spend": 20, + "conversion_rate": 0, + "real_time_cost_per_conversion": 0, + "promotion_type": "Website", + "tt_app_id": 0, + "real_time_cost_per_result": 0.3509, + "conversion": 0, + "secondary_goal_result": null, + "campaign_name": "CampaignVadimTraffic", + "cpm": 3.26, + "result": 57, + "ad_name": "AdVadim-Optimized Version 3_202203281449_2022-03-28 05:03:44", + "secondary_goal_result_rate": null, + "clicks": 57, + "reach": 5262, + "cost_per_secondary_goal_result": null, + "real_time_conversion": 0, + "real_time_conversion_rate": 0, + "mobile_app_id": "0", + "tt_app_name": "0", + "adgroup_name": "AdGroupVadim", + "dpa_target_audience_type": null + } +} +``` + +**AdvertisersReports Stream - [BasicReports](https://ads.tiktok.com/marketing_api/docs?id=1707957200780290)** +``` +{ + "metrics": { + "cpm": 5.43, + "impressions": 3682, + "frequency": 1.17, + "reach": 3156, + "cash_spend": 20, + "ctr": 1.14, + "spend": 20, + "cpc": 0.48, + "cost_per_1000_reached": 6.337, + "clicks": 42, + "voucher_spend": 0 + }, + "dimensions": { + "stat_time_day": "2022-03-30 00:00:00", + "advertiser_id": 7002238017842757633 + } +} + +``` + +**AdGroupsReports Stream - [BasicReports](https://ads.tiktok.com/marketing_api/docs?id=1707957200780290)** +``` +{ + "metrics": { + 
"real_time_conversion": 0, + "real_time_cost_per_conversion": 0, + "cost_per_1000_reached": 3.801, + "mobile_app_id": "0", + "reach": 5262, + "cpm": 3.26, + "conversion": 0, + "promotion_type": "Website", + "clicks": 57, + "real_time_result_rate": 0.93, + "real_time_conversion_rate": 0, + "cost_per_conversion": 0, + "dpa_target_audience_type": null, + "result": 57, + "cpc": 0.35, + "impressions": 6137, + "cost_per_result": 0.3509, + "tt_app_id": 0, + "cost_per_secondary_goal_result": null, + "frequency": 1.17, + "spend": 20, + "secondary_goal_result_rate": null, + "real_time_cost_per_result": 0.3509, + "real_time_result": 57, + "placement": "Automatic Placement", + "result_rate": 0.93, + "tt_app_name": "0", + "campaign_name": "CampaignVadimTraffic", + "secondary_goal_result": null, + "campaign_id": 1728545382536225, + "conversion_rate": 0, + "ctr": 0.93, + "adgroup_name": "AdGroupVadim" + }, + "dimensions": { + "adgroup_id": 1728545385226289, + "stat_time_day": "2022-03-29 00:00:00" + } +} +``` + +**CampaignsReports Stream - [BasicReports](https://ads.tiktok.com/marketing_api/docs?id=1707957200780290)** +``` +{ + "metrics": { + "cpc": 0.43, + "spend": 20, + "clicks": 46, + "cost_per_1000_reached": 4.002, + "impressions": 5870, + "ctr": 0.78, + "frequency": 1.17, + "cpm": 3.41, + "campaign_name": "CampaignVadimTraffic", + "reach": 4997 + }, + "dimensions": { + "campaign_id": 1728545382536225, + "stat_time_day": "2022-03-28 00:00:00" + } +} + +``` + +**AdsAudienceReports Stream - [AudienceReports](https://ads.tiktok.com/marketing_api/docs?id=1707957217727489)** +``` +{ + { + "result": 17, + "clicks": 17, + "real_time_conversion_rate": 0, + "adgroup_id": 1728545385226289, + "cpm": 3.01, + "cost_per_result": 0.4165, + "real_time_cost_per_result": 0.4165, + "mobile_app_id": 0, + "spend": 7.08, + "cpc": 0.42, + "placement": "Automatic Placement", + "real_time_conversion": 0, + "dpa_target_audience_type": null, + "real_time_result_rate": 0.72, + "adgroup_name": "AdGroupVadim", + "tt_app_id": 0, + "ctr": 0.72, + "ad_text": "Open-source\ndata integration for modern data teams", + "result_rate": 0.72, + "ad_name": "AdVadim-Optimized Version 3_202203281449_2022-03-28 05:03:44", + "conversion_rate": 0, + "real_time_result": 17, + "tt_app_name": "0", + "cost_per_conversion": 0, + "real_time_cost_per_conversion": 0, + "conversion": 0, + "impressions": 2350, + "promotion_type": "Website", + "campaign_id": 1728545382536225, + "campaign_name": "CampaignVadimTraffic" + }, + "dimensions": { + "gender": "MALE", + "age": "AGE_25_34", + "ad_id": 1728545390695442, + "stat_time_day": "2022-03-28 00:00:00" + } +} +``` + +**AdvertisersAudienceReports Stream - [AudienceReports](https://ads.tiktok.com/marketing_api/docs?id=1707957217727489)** +``` +{ + "dimensions": { + "stat_time_day": "2022-03-28 00:00:00", + "gender": "FEMALE", + "advertiser_id": 7002238017842757633, + "age": "AGE_35_44" + }, + "metrics": { + "spend": 3.09, + "ctr": 0.93, + "cpc": 0.44, + "clicks": 7, + "cpm": 4.11, + "impressions": 752 + } +} +``` + +**AdGroupAudienceReports Stream - [AudienceReports](https://ads.tiktok.com/marketing_api/docs?id=1707957217727489)** +``` +{ + "dimensions": { + "gender": "MALE", + "age": "AGE_25_34", + "stat_time_day": "2022-03-29 00:00:00", + "adgroup_id": 1728545385226289 + }, + "metrics": { + "cost_per_conversion": 0, + "campaign_id": 1728545382536225, + "campaign_name": "CampaignVadimTraffic", + "clicks": 20, + "dpa_target_audience_type": null, + "mobile_app_id": 0, + "promotion_type": "Website", + 
"conversion_rate": 0, + "cpm": 3.9, + "cost_per_result": 0.3525, + "cpc": 0.35, + "real_time_cost_per_conversion": 0, + "ctr": 1.11, + "spend": 7.05, + "result": 20, + "real_time_result": 20, + "impressions": 1806, + "conversion": 0, + "real_time_result_rate": 1.11, + "real_time_conversion_rate": 0, + "real_time_conversion": 0, + "adgroup_name": "AdGroupVadim", + "tt_app_name": "0", + "placement": "Automatic Placement", + "real_time_cost_per_result": 0.3525, + "result_rate": 1.11, + "tt_app_id": 0 + } +} +``` + +**CampaignsAudienceReportsByCountry Stream - [AudienceReports](https://ads.tiktok.com/marketing_api/docs?id=1707957217727489)** +``` +{ + "metrics": { + "impressions": 5870, + "campaign_name": "CampaignVadimTraffic", + "cpm": 3.41, + "clicks": 46, + "spend": 20, + "ctr": 0.78, + "cpc": 0.43 + }, + "dimensions": { + "stat_time_day": "2022-03-28 00:00:00", + "campaign_id": 1728545382536225, + "country_code": "US" + } +} + +``` + +## Performance considerations + +The connector is restricted by [requests limitation](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1725359439428610). This connector should not run into TikTok Marketing API limitations under normal usage. Please [create an issue](https://github.com/airbytehq/airbyte/issues) if you see any rate limit issues that are not automatically retried successfully. ## Changelog | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------------------------| +| 0.1.7 | 2022-04-27 | [12380](https://github.com/airbytehq/airbyte/pull/12380) | fixed spec descriptions and documentation | | 0.1.6 | 2022-04-19 | [11378](https://github.com/airbytehq/airbyte/pull/11378) | updated logic for stream initializations, fixed errors in schemas, updated SAT and unit tests | | 0.1.5 | 2022-02-17 | [10398](https://github.com/airbytehq/airbyte/pull/10398) | Add Audience reports | | 0.1.4 | 2021-12-30 | [7636](https://github.com/airbytehq/airbyte/pull/7636) | Add OAuth support | From 2eb93560e62eab9ed027f1f68e04f9f115ef451d Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Wed, 27 Apr 2022 20:41:43 +0300 Subject: [PATCH 011/152] Source Smartsheets: incremental read and tests (#12077) * #5520 fix scrambled columns bug * #5520 source smartsheets: add changelog item * #5520 move pytest to optional setup requirements * #12003 source smartsheets: implement incremental read + tests * #12003 source smartsheet: add changelog * #12003 source smartsheets: fix merge conflict on unit tests * #12003 source smartsheets: fix startdate in spec * #12003 source smartsheets: add default start dt to spec * #12003 source smartsheets: add default start dt to spec * auto-bump connector version Co-authored-by: Octavia Squidington III --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 11 +- .../connectors/source-smartsheets/Dockerfile | 2 +- .../acceptance-test-config.yml | 10 + .../integration_tests/abnormal_state.json | 5 + .../integration_tests/configured_catalog.json | 3 +- .../integration_tests/expected_records.txt | 100 +++++++ .../source_smartsheets/sheet.py | 92 +++++++ .../source_smartsheets/source.py | 125 +-------- .../source_smartsheets/spec.json | 8 + .../source_smartsheets/streams.py | 58 ++++ .../source-smartsheets/unit_tests/conftest.py | 34 +++ .../unit_tests/response.json | 251 ++++++++++++++++++ .../unit_tests/test_sheets.py | 119 +++++++++ 
.../unit_tests/test_source.py | 44 +-- .../unit_tests/test_streams.py | 21 ++ docs/integrations/sources/smartsheets.md | 9 +- 17 files changed, 741 insertions(+), 153 deletions(-) create mode 100644 airbyte-integrations/connectors/source-smartsheets/integration_tests/abnormal_state.json create mode 100644 airbyte-integrations/connectors/source-smartsheets/integration_tests/expected_records.txt create mode 100644 airbyte-integrations/connectors/source-smartsheets/source_smartsheets/sheet.py create mode 100644 airbyte-integrations/connectors/source-smartsheets/source_smartsheets/streams.py create mode 100644 airbyte-integrations/connectors/source-smartsheets/unit_tests/conftest.py create mode 100644 airbyte-integrations/connectors/source-smartsheets/unit_tests/response.json create mode 100644 airbyte-integrations/connectors/source-smartsheets/unit_tests/test_sheets.py create mode 100644 airbyte-integrations/connectors/source-smartsheets/unit_tests/test_streams.py diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 746412679cfb8f..d642115f0f67c4 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -737,7 +737,7 @@ - name: Smartsheets sourceDefinitionId: 374ebc65-6636-4ea0-925c-7d35999a8ffc dockerRepository: airbyte/source-smartsheets - dockerImageTag: 0.1.9 + dockerImageTag: 0.1.10 documentationUrl: https://docs.airbyte.io/integrations/sources/smartsheets icon: smartsheet.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 631af548711a59..f3e3f72075baae 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -7856,7 +7856,7 @@ oauthFlowOutputParameters: - - "access_token" - - "refresh_token" -- dockerImage: "airbyte/source-smartsheets:0.1.9" +- dockerImage: "airbyte/source-smartsheets:0.1.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/smartsheets" connectionSpecification: @@ -7878,6 +7878,15 @@ title: "Sheet ID" description: "The spreadsheet ID. 
Find in the spreadsheet menu: File > Properties" type: "string" + start_datetime: + title: "Start Datetime" + type: "string" + examples: + - "2000-01-01T13:00:00" + - "2000-01-01T13:00:00-07:00" + description: "ISO 8601, for instance: `YYYY-MM-DDTHH:MM:SS`, `YYYY-MM-DDTHH:MM:SS+HH:MM`" + format: "date-time" + default: "2020-01-01T00:00:00+00:00" supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] diff --git a/airbyte-integrations/connectors/source-smartsheets/Dockerfile b/airbyte-integrations/connectors/source-smartsheets/Dockerfile index 7907f022cc86a0..cb26f971e9dad1 100644 --- a/airbyte-integrations/connectors/source-smartsheets/Dockerfile +++ b/airbyte-integrations/connectors/source-smartsheets/Dockerfile @@ -14,5 +14,5 @@ COPY $CODE_PATH ./$CODE_PATH ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.9 +LABEL io.airbyte.version=0.1.10 LABEL io.airbyte.name=airbyte/source-smartsheets diff --git a/airbyte-integrations/connectors/source-smartsheets/acceptance-test-config.yml b/airbyte-integrations/connectors/source-smartsheets/acceptance-test-config.yml index 54c4a0e8df865a..063f068e5caa91 100644 --- a/airbyte-integrations/connectors/source-smartsheets/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-smartsheets/acceptance-test-config.yml @@ -12,6 +12,16 @@ tests: basic_read: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + expect_records: + path: "integration_tests/expected_records.txt" + extra_fields: yes + exact_order: yes + extra_records: no full_refresh: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" + incremental: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state_path: "integration_tests/abnormal_state.json" diff --git a/airbyte-integrations/connectors/source-smartsheets/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-smartsheets/integration_tests/abnormal_state.json new file mode 100644 index 00000000000000..461ef6d45b5702 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "aws_s3_sample": { + "modifiedAt": "2222-03-07T11:30:00+00:00" + } +} diff --git a/airbyte-integrations/connectors/source-smartsheets/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-smartsheets/integration_tests/configured_catalog.json index e263f7cae20890..f919a67cd985a0 100644 --- a/airbyte-integrations/connectors/source-smartsheets/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-smartsheets/integration_tests/configured_catalog.json @@ -13,7 +13,8 @@ "gender": { "type": "string" }, "ip_address": { "type": "string" }, "primary_email": { "type": "string" }, - "dob": { "type": "string", "format": "date" } + "dob": { "type": "string", "format": "date" }, + "modifiedAt": { "type": "string", "format": "date-time" } } }, "supported_sync_modes": ["full_refresh"] diff --git a/airbyte-integrations/connectors/source-smartsheets/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-smartsheets/integration_tests/expected_records.txt new file mode 100644 index 00000000000000..7d1e55999bc1e6 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-smartsheets/integration_tests/expected_records.txt @@ -0,0 +1,100 @@ +{"stream": "aws_s3_sample", "data": {"id": "1.0", "first_name": "Joni", "last_name": "Watling", "email": "jwatling0@amazonaws.com", "gender": "Genderqueer", "ip_address": "195.50.216.194", "dob": "2020-11-23"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "2.0", "first_name": "Bernardo", "last_name": "Klaaassen", "email": "bklaaassen1@cbc.ca", "gender": "Polygender", "ip_address": "116.208.253.97", "dob": "2020-02-22"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "3.0", "first_name": "Drake", "last_name": "Bednell", "email": "dbednell2@theguardian.com", "gender": "Non-binary", "ip_address": "120.15.24.132", "dob": "2020-08-21"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "4.0", "first_name": "Alfreda", "last_name": "Brumbye", "email": "abrumbye3@howstuffworks.com", "gender": "Genderqueer", "ip_address": "64.22.217.122", "dob": "2020-12-29"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "5.0", "first_name": "Boycey", "last_name": "Brisson", "email": "bbrisson4@bizjournals.com", "gender": "Bigender", "ip_address": "59.220.127.45", "dob": "2020-06-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "6.0", "first_name": "Ursuline", "last_name": "Lintott", "email": "ulintott5@ow.ly", "gender": "Genderqueer", "ip_address": "47.253.138.238", "dob": "2020-07-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "7.0", "first_name": "Bettine", "last_name": "McKennan", "email": "bmckennan6@census.gov", "gender": "Bigender", "ip_address": "35.42.88.34", "dob": "2020-06-12"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "8.0", "first_name": "Eustace", "last_name": "Aaronsohn", "email": "eaaronsohn7@yale.edu", "gender": "Male", "ip_address": "84.153.189.160", "dob": "2020-12-14"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "9.0", "first_name": "Chrystel", "last_name": "Blum", "email": "cblum8@360.cn", "gender": "Bigender", "ip_address": "44.5.17.116", "dob": "2020-09-14"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "10.0", "first_name": "Kathryne", "last_name": "Cuncarr", "email": "kcuncarr9@hhs.gov", "gender": "Female", "ip_address": "50.63.175.212", "dob": "2020-06-24"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "11.0", "first_name": "Filmer", "last_name": "Ginni", "email": "fginnia@ucoz.com", "gender": "Genderfluid", "ip_address": "248.137.123.63", "dob": "2020-12-30"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "12.0", "first_name": "Anthiathia", "last_name": "Sketh", "email": "askethb@1688.com", "gender": "Female", "ip_address": "40.58.34.216", "dob": "2020-05-10"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "13.0", "first_name": "Pamella", "last_name": "Winterson", "email": "pwintersonc@biglobe.ne.jp", "gender": "Female", "ip_address": "173.8.175.104", "dob": "2020-06-28"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "14.0", "first_name": "Zuzana", "last_name": "Esmead", "email": "zesmeadd@bloglovin.com", "gender": "Polygender", "ip_address": "98.192.39.217", "dob": "2020-02-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "15.0", "first_name": "Donica", "last_name": "Jozaitis", "email": 
"djozaitise@amazon.de", "gender": "Female", "ip_address": "160.231.57.131", "dob": "2021-01-04"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "16.0", "first_name": "Pennie", "last_name": "Dunrige", "email": "pdunrigef@gravatar.com", "gender": "Non-binary", "ip_address": "208.255.160.56", "dob": "2020-03-01"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "17.0", "first_name": "Blanca", "last_name": "Allcroft", "email": "ballcroftg@furl.net", "gender": "Agender", "ip_address": "21.129.47.109", "dob": "2021-01-01"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "18.0", "first_name": "Webb", "last_name": "Simkins", "email": "wsimkinsh@qq.com", "gender": "Male", "ip_address": "2.125.148.89", "dob": "2020-06-16"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "19.0", "first_name": "Dorrie", "last_name": "Esser", "email": "desseri@rediff.com", "gender": "Female", "ip_address": "17.148.200.84", "dob": "2020-11-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "20.0", "first_name": "Kara", "last_name": "Gley", "email": "kgleyj@php.net", "gender": "Bigender", "ip_address": "117.130.134.124", "dob": "2020-12-02"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "21.0", "first_name": "Felicle", "last_name": "Roscrigg", "email": "froscriggk@java.com", "gender": "Female", "ip_address": "36.67.5.211", "dob": "2020-03-05"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "22.0", "first_name": "Carmine", "last_name": "Backshill", "email": "cbackshilll@addthis.com", "gender": "Polygender", "ip_address": "103.28.140.64", "dob": "2020-12-13"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "23.0", "first_name": "Helge", "last_name": "Kneeshaw", "email": "hkneeshawm@goo.gl", "gender": "Genderfluid", "ip_address": "154.154.89.226", "dob": "2020-07-15"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "24.0", "first_name": "Suzy", "last_name": "Ohm", "email": "sohmn@columbia.edu", "gender": "Bigender", "ip_address": "100.54.193.73", "dob": "2020-10-25"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "25.0", "first_name": "Bianka", "last_name": "Melmore", "email": "bmelmoreo@sohu.com", "gender": "Genderqueer", "ip_address": "38.63.204.171", "dob": "2020-11-02"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "26.0", "first_name": "Kim", "last_name": "Joust", "email": "kjoustp@sbwire.com", "gender": "Male", "ip_address": "87.176.59.210", "dob": "2020-12-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "27.0", "first_name": "Darrin", "last_name": "Warlawe", "email": "dwarlaweq@shinystat.com", "gender": "Male", "ip_address": "138.16.204.148", "dob": "2020-11-17"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "28.0", "first_name": "Edgard", "last_name": "Byfford", "email": "ebyffordr@spotify.com", "gender": "Polygender", "ip_address": "162.208.75.173", "dob": "2020-07-25"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "29.0", "first_name": "Dee", "last_name": "Bourgeois", "email": "dbourgeoiss@elegantthemes.com", "gender": "Polygender", "ip_address": "20.250.26.143", "dob": "2020-10-20"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "30.0", "first_name": "Fredrika", "last_name": "Ingry", "email": 
"fingryt@slashdot.org", "gender": "Non-binary", "ip_address": "255.214.102.98", "dob": "2020-04-03"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "31.0", "first_name": "Christie", "last_name": "Krier", "email": "ckrieru@aboutads.info", "gender": "Bigender", "ip_address": "29.122.167.180", "dob": "2020-09-24"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "32.0", "first_name": "Joshuah", "last_name": "Braffington", "email": "jbraffingtonv@foxnews.com", "gender": "Agender", "ip_address": "189.155.6.135", "dob": "2020-09-22"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "33.0", "first_name": "Bailie", "last_name": "Fossey", "email": "bfosseyw@flickr.com", "gender": "Agender", "ip_address": "129.166.4.82", "dob": "2020-05-30"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "34.0", "first_name": "Westley", "last_name": "Kupper", "email": "wkupperx@a8.net", "gender": "Agender", "ip_address": "12.125.54.217", "dob": "2020-04-10"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "35.0", "first_name": "Allie", "last_name": "Moogan", "email": "amoogany@jigsy.com", "gender": "Male", "ip_address": "158.225.146.105", "dob": "2020-12-24"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "36.0", "first_name": "Obadias", "last_name": "Stammers", "email": "ostammersz@shinystat.com", "gender": "Polygender", "ip_address": "210.226.250.161", "dob": "2021-01-01"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "37.0", "first_name": "Philippine", "last_name": "Barhems", "email": "pbarhems10@ted.com", "gender": "Male", "ip_address": "169.205.179.145", "dob": "2021-01-14"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "38.0", "first_name": "Theo", "last_name": "Messitt", "email": "tmessitt11@deviantart.com", "gender": "Male", "ip_address": "103.212.77.16", "dob": "2020-09-20"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "39.0", "first_name": "Roch", "last_name": "Cuphus", "email": "rcuphus12@pinterest.com", "gender": "Agender", "ip_address": "43.96.220.113", "dob": "2020-12-20"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "40.0", "first_name": "Sinclair", "last_name": "Chittey", "email": "schittey13@tamu.edu", "gender": "Genderfluid", "ip_address": "128.194.26.163", "dob": "2020-10-10"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "41.0", "first_name": "Eleonore", "last_name": "Guerrieri", "email": "eguerrieri14@typepad.com", "gender": "Genderfluid", "ip_address": "79.210.103.73", "dob": "2020-07-17"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "42.0", "first_name": "Elana", "last_name": "Secret", "email": "esecret15@mysql.com", "gender": "Polygender", "ip_address": "102.139.145.231", "dob": "2021-01-15"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "43.0", "first_name": "Dennie", "last_name": "Prati", "email": "dprati16@nytimes.com", "gender": "Genderqueer", "ip_address": "51.119.24.56", "dob": "2020-10-06"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "44.0", "first_name": "Roderick", "last_name": "Dand", "email": "rdand17@gmpg.org", "gender": "Genderqueer", "ip_address": "188.187.179.115", "dob": "2020-11-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "45.0", "first_name": "Lonnie", 
"last_name": "Grigolashvill", "email": "lgrigolashvill18@hhs.gov", "gender": "Non-binary", "ip_address": "96.104.221.230", "dob": "2020-05-05"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "46.0", "first_name": "Leslie", "last_name": "Iddins", "email": "liddins19@sbwire.com", "gender": "Genderqueer", "ip_address": "77.228.177.247", "dob": "2020-06-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "47.0", "first_name": "Conant", "last_name": "Gaishson", "email": "cgaishson1a@oakley.com", "gender": "Agender", "ip_address": "71.118.171.42", "dob": "2020-12-29"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "48.0", "first_name": "Aileen", "last_name": "Derrell", "email": "aderrell1b@amazonaws.com", "gender": "Genderfluid", "ip_address": "233.79.86.81", "dob": "2020-06-02"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "49.0", "first_name": "Heywood", "last_name": "Poulston", "email": "hpoulston1c@opera.com", "gender": "Genderqueer", "ip_address": "115.6.245.150", "dob": "2020-04-16"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "50.0", "first_name": "Neddie", "last_name": "Rickert", "email": "nrickert1d@omniture.com", "gender": "Polygender", "ip_address": "25.55.171.143", "dob": "2020-11-17"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "51.0", "first_name": "Ronny", "last_name": "Bondley", "email": "rbondley1e@loc.gov", "gender": "Genderqueer", "ip_address": "33.164.53.233", "dob": "2020-05-22"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "52.0", "first_name": "Filippa", "last_name": "McCuis", "email": "fmccuis1f@desdev.cn", "gender": "Bigender", "ip_address": "30.78.184.43", "dob": "2021-01-12"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "53.0", "first_name": "Kipper", "last_name": "Corton", "email": "kcorton1g@t.co", "gender": "Bigender", "ip_address": "177.22.101.164", "dob": "2021-01-23"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "54.0", "first_name": "Clementine", "last_name": "Callen", "email": "ccallen1h@storify.com", "gender": "Genderfluid", "ip_address": "122.40.201.54", "dob": "2020-06-03"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "55.0", "first_name": "Silvie", "last_name": "Royse", "email": "sroyse1i@mapquest.com", "gender": "Genderqueer", "ip_address": "38.145.193.0", "dob": "2020-06-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "56.0", "first_name": "Noble", "last_name": "Purslow", "email": "npurslow1j@redcross.org", "gender": "Non-binary", "ip_address": "119.89.26.248", "dob": "2020-11-21"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "57.0", "first_name": "Marjy", "last_name": "Gloves", "email": "mgloves1k@drupal.org", "gender": "Genderqueer", "ip_address": "250.108.63.170", "dob": "2020-07-24"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "58.0", "first_name": "Ellwood", "last_name": "Gullam", "email": "egullam1l@google.cn", "gender": "Genderfluid", "ip_address": "128.65.236.88", "dob": "2020-05-07"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "59.0", "first_name": "Adora", "last_name": "Povele", "email": "apovele1m@statcounter.com", "gender": "Genderfluid", "ip_address": "215.67.227.145", "dob": "2020-06-23"}, "emitted_at": 1649842201000} +{"stream": 
"aws_s3_sample", "data": {"id": "60.0", "first_name": "Miles", "last_name": "Zapatero", "email": "mzapatero1n@ezinearticles.com", "gender": "Non-binary", "ip_address": "212.252.221.177", "dob": "2020-05-22"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "61.0", "first_name": "Eddie", "last_name": "Menichi", "email": "emenichi1o@about.com", "gender": "Genderqueer", "ip_address": "138.77.252.222", "dob": "2020-02-11"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "62.0", "first_name": "Jakob", "last_name": "Showalter", "email": "jshowalter1p@cargocollective.com", "gender": "Genderfluid", "ip_address": "138.186.250.131", "dob": "2021-01-05"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "63.0", "first_name": "Zebadiah", "last_name": "Geratt", "email": "zgeratt1q@surveymonkey.com", "gender": "Genderfluid", "ip_address": "239.69.201.221", "dob": "2020-05-02"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "64.0", "first_name": "Carleton", "last_name": "Gayther", "email": "cgayther1r@si.edu", "gender": "Genderqueer", "ip_address": "138.237.56.77", "dob": "2020-03-09"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "65.0", "first_name": "Gwendolyn", "last_name": "Cotgrave", "email": "gcotgrave1s@dyndns.org", "gender": "Agender", "ip_address": "103.26.18.169", "dob": "2020-06-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "66.0", "first_name": "Nikki", "last_name": "Corry", "email": "ncorry1t@dedecms.com", "gender": "Female", "ip_address": "118.138.87.91", "dob": "2020-08-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "67.0", "first_name": "Kat", "last_name": "Figgins", "email": "kfiggins1u@jugem.jp", "gender": "Male", "ip_address": "202.202.94.181", "dob": "2020-06-19"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "68.0", "first_name": "Norean", "last_name": "Trendle", "email": "ntrendle1v@elpais.com", "gender": "Genderqueer", "ip_address": "134.89.22.248", "dob": "2020-08-24"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "69.0", "first_name": "Foster", "last_name": "Durker", "email": "fdurker1w@engadget.com", "gender": "Non-binary", "ip_address": "189.149.34.80", "dob": "2020-11-02"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "70.0", "first_name": "Rod", "last_name": "Jarnell", "email": "rjarnell1x@sphinn.com", "gender": "Genderfluid", "ip_address": "169.148.199.234", "dob": "2020-08-19"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "71.0", "first_name": "Lancelot", "last_name": "Plaxton", "email": "lplaxton1y@spiegel.de", "gender": "Agender", "ip_address": "81.194.71.38", "dob": "2020-09-07"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "72.0", "first_name": "Rudyard", "last_name": "Olliff", "email": "rolliff1z@bbb.org", "gender": "Agender", "ip_address": "113.39.154.178", "dob": "2021-01-07"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "73.0", "first_name": "Shelley", "last_name": "Lipprose", "email": "slipprose20@engadget.com", "gender": "Polygender", "ip_address": "117.254.24.20", "dob": "2021-01-03"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "74.0", "first_name": "Prudi", "last_name": "Boichat", "email": "pboichat21@cam.ac.uk", "gender": "Agender", "ip_address": "99.169.9.122", "dob": 
"2020-08-25"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "75.0", "first_name": "Denny", "last_name": "Bollum", "email": "dbollum22@skyrock.com", "gender": "Bigender", "ip_address": "77.112.28.180", "dob": "2020-07-31"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "76.0", "first_name": "Lucila", "last_name": "Charteris", "email": "lcharteris23@linkedin.com", "gender": "Genderfluid", "ip_address": "194.161.40.83", "dob": "2020-05-17"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "77.0", "first_name": "Marrissa", "last_name": "Wurz", "email": "mwurz24@pinterest.com", "gender": "Agender", "ip_address": "72.219.43.46", "dob": "2020-09-04"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "78.0", "first_name": "Teresina", "last_name": "Micklewicz", "email": "tmicklewicz25@goo.ne.jp", "gender": "Genderqueer", "ip_address": "214.116.247.204", "dob": "2020-09-13"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "79.0", "first_name": "Idette", "last_name": "Ilieve", "email": "iilieve26@mozilla.com", "gender": "Bigender", "ip_address": "25.25.28.17", "dob": "2020-09-21"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "80.0", "first_name": "Noemi", "last_name": "Lempenny", "email": "nlempenny27@jugem.jp", "gender": "Bigender", "ip_address": "194.139.183.130", "dob": "2020-09-01"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "81.0", "first_name": "Faye", "last_name": "Ashbee", "email": "fashbee28@google.com", "gender": "Bigender", "ip_address": "191.149.120.198", "dob": "2020-03-12"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "82.0", "first_name": "Olly", "last_name": "Siaspinski", "email": "osiaspinski29@amazonaws.com", "gender": "Polygender", "ip_address": "150.134.136.240", "dob": "2020-07-12"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "83.0", "first_name": "Marji", "last_name": "Dahlen", "email": "mdahlen2a@zdnet.com", "gender": "Bigender", "ip_address": "185.226.214.79", "dob": "2020-11-28"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "84.0", "first_name": "Aubine", "last_name": "Genner", "email": "agenner2b@chronoengine.com", "gender": "Genderfluid", "ip_address": "109.51.123.153", "dob": "2020-03-28"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "85.0", "first_name": "Dix", "last_name": "Civitillo", "email": "dcivitillo2c@bluehost.com", "gender": "Female", "ip_address": "112.89.157.163", "dob": "2020-05-30"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "86.0", "first_name": "Birk", "last_name": "Mussolini", "email": "bmussolini2d@wikimedia.org", "gender": "Agender", "ip_address": "235.49.78.159", "dob": "2020-03-05"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "87.0", "first_name": "Lenci", "last_name": "Wager", "email": "lwager2e@fda.gov", "gender": "Agender", "ip_address": "113.145.228.184", "dob": "2020-03-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "88.0", "first_name": "Avrit", "last_name": "Yosifov", "email": "ayosifov2f@umn.edu", "gender": "Male", "ip_address": "112.171.167.81", "dob": "2021-01-18"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "89.0", "first_name": "Honor", "last_name": "McMorran", "email": "hmcmorran2g@bbc.co.uk", "gender": "Genderqueer", 
"ip_address": "11.179.26.90", "dob": "2020-04-07"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "90.0", "first_name": "Lilah", "last_name": "Carnall", "email": "lcarnall2h@barnesandnoble.com", "gender": "Polygender", "ip_address": "51.194.48.153", "dob": "2020-06-13"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "91.0", "first_name": "Daffie", "last_name": "Cheke", "email": "dcheke2i@theatlantic.com", "gender": "Polygender", "ip_address": "158.53.238.38", "dob": "2020-11-12"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "92.0", "first_name": "Ariel", "last_name": "Minor", "email": "aminor2j@blogger.com", "gender": "Polygender", "ip_address": "29.0.88.144", "dob": "2020-07-16"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "93.0", "first_name": "Kenna", "last_name": "Spraggon", "email": "kspraggon2k@google.fr", "gender": "Agender", "ip_address": "139.245.147.77", "dob": "2020-11-15"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "94.0", "first_name": "Evelyn", "last_name": "Oleshunin", "email": "eoleshunin2l@istockphoto.com", "gender": "Genderqueer", "ip_address": "26.117.119.59", "dob": "2020-08-11"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "95.0", "first_name": "Marcel", "last_name": "Kuhnt", "email": "mkuhnt2m@google.com.au", "gender": "Genderfluid", "ip_address": "84.158.205.130", "dob": "2020-08-22"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "96.0", "first_name": "Wendye", "last_name": "Wigelsworth", "email": "wwigelsworth2n@webs.com", "gender": "Polygender", "ip_address": "241.71.79.173", "dob": "2020-02-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "97.0", "first_name": "Nonie", "last_name": "Cadany", "email": "ncadany2o@cdbaby.com", "gender": "Female", "ip_address": "87.132.223.229", "dob": "2020-05-30"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "98.0", "first_name": "Arthur", "last_name": "Norsister", "email": "anorsister2p@csmonitor.com", "gender": "Male", "ip_address": "21.50.95.6", "dob": "2020-05-13"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "99.0", "first_name": "Auria", "last_name": "Haryngton", "email": "aharyngton2q@mapquest.com", "gender": "Non-binary", "ip_address": "246.28.159.95", "dob": "2020-06-28"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "100.0", "first_name": "Phelia", "last_name": "Simmig", "email": "psimmig2r@example.com", "gender": "Agender", "ip_address": "205.35.103.161", "dob": "2020-04-05"}, "emitted_at": 1649842201000} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/sheet.py b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/sheet.py new file mode 100644 index 00000000000000..5708492259375f --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/sheet.py @@ -0,0 +1,92 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+#
+
+import logging
+from functools import cached_property
+from typing import Any, Dict, Iterable, Mapping, Optional, Tuple
+
+import smartsheet
+
+
+class SmartSheetAPIWrapper:
+    def __init__(self, config: Mapping[str, Any]):
+        self._spreadsheet_id = config["spreadsheet_id"]
+        self._access_token = config["access_token"]
+        api_client = smartsheet.Smartsheet(self._access_token)
+        api_client.errors_as_exceptions(True)
+        # each call to `Sheets` makes a new instance, so we save the bound method here to avoid creating new objects
+        self._get_sheet = api_client.Sheets.get_sheet
+        self._data = None
+
+    def _fetch_sheet(self, from_dt: Optional[str] = None) -> None:
+        kwargs = {"rows_modified_since": from_dt}
+        if not from_dt:
+            kwargs["page_size"] = 1
+        self._data = self._get_sheet(self._spreadsheet_id, **kwargs)
+
+    @staticmethod
+    def _column_to_property(column_type: str) -> Dict[str, Any]:
+        type_mapping = {
+            "TEXT_NUMBER": {"type": "string"},
+            "DATE": {"type": "string", "format": "date"},
+            "DATETIME": {"type": "string", "format": "date-time"},
+        }
+        return type_mapping.get(column_type, {"type": "string"})
+
+    def _construct_record(self, row: smartsheet.models.Row) -> Dict[str, str]:
+        values_column_map = {cell.column_id: str(cell.value or "") for cell in row.cells}
+        record = {column.title: values_column_map[column.id] for column in self.data.columns}
+        record["modifiedAt"] = row.modified_at.isoformat()
+        return record
+
+    @property
+    def data(self) -> smartsheet.models.Sheet:
+        if not self._data:
+            self._fetch_sheet()
+        return self._data
+
+    @property
+    def name(self) -> str:
+        return self.data.name
+
+    @property
+    def row_count(self) -> int:
+        return len(self.data.rows)
+
+    @cached_property
+    def primary_key(self) -> str:
+        for column in self.data.columns:
+            if column.primary:
+                return column.title
+
+    @cached_property
+    def json_schema(self) -> Dict[str, Any]:
+        column_info = {column.title: self._column_to_property(column.type.value) for column in self.data.columns}
+        column_info["modifiedAt"] = {"type": "string", "format": "date-time"}  # add cursor field explicitly
+        json_schema = {
+            "$schema": "http://json-schema.org/draft-07/schema#",
+            "type": "object",
+            "properties": column_info,
+        }
+        return json_schema
+
+    def read_records(self, from_dt: str) -> Iterable[Dict[str, str]]:
+        self._fetch_sheet(from_dt)
+        for row in self.data.rows:
+            yield self._construct_record(row)
+
+    def check_connection(self, logger: logging.Logger) -> Tuple[bool, Optional[str]]:
+        try:
+            _ = self.data
+        except smartsheet.exceptions.ApiError as e:
+            err = e.error.result
+            code = 404 if err.code == 1006 else err.code
+            reason = f"{err.name}: {code} - {err.message} | Check your spreadsheet ID."
+            logger.error(reason)
+            return False, reason
+        except Exception as e:
+            reason = str(e)
+            logger.error(reason)
+            return False, reason
+        return True, None
diff --git a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/source.py b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/source.py
index 542cdb04210353..3bce5c71f7e4b0 100644
--- a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/source.py
+++ b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/source.py
@@ -2,120 +2,21 @@
 #
 # Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
# +import logging +from typing import Any, List, Mapping, Tuple -import json -from datetime import datetime -from typing import Dict, Generator, List +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream -import smartsheet -from airbyte_cdk import AirbyteLogger -from airbyte_cdk.models import ( - AirbyteCatalog, - AirbyteConnectionStatus, - AirbyteMessage, - AirbyteRecordMessage, - AirbyteStream, - ConfiguredAirbyteCatalog, - Status, - Type, -) -from airbyte_cdk.sources import Source +from .sheet import SmartSheetAPIWrapper +from .streams import SmartsheetStream -def get_prop(col_type: str) -> Dict[str, any]: - props = { - "TEXT_NUMBER": {"type": "string"}, - "DATE": {"type": "string", "format": "date"}, - "DATETIME": {"type": "string", "format": "date-time"}, - } - return props.get(col_type, {"type": "string"}) +class SourceSmartsheets(AbstractSource): + def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> Tuple[bool, any]: + sheet = SmartSheetAPIWrapper(config) + return sheet.check_connection(logger) - -def construct_record(sheet_columns: List[Dict], row_cells: List[Dict]) -> Dict: - # convert all data to string as it is only expected format in schema - values_column_map = {cell["columnId"]: str(cell.get("value", "")) for cell in row_cells} - return {column["title"]: values_column_map[column["id"]] for column in sheet_columns} - - -def get_json_schema(sheet_columns: List[Dict]) -> Dict: - column_info = {column["title"]: get_prop(column["type"]) for column in sheet_columns} - json_schema = { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": column_info, - } - return json_schema - - -class SourceSmartsheets(Source): - def check(self, logger: AirbyteLogger, config: json) -> AirbyteConnectionStatus: - try: - access_token = config["access_token"] - spreadsheet_id = config["spreadsheet_id"] - - smartsheet_client = smartsheet.Smartsheet(access_token) - smartsheet_client.errors_as_exceptions(True) - smartsheet_client.Sheets.get_sheet(spreadsheet_id) - - return AirbyteConnectionStatus(status=Status.SUCCEEDED) - except Exception as e: - if isinstance(e, smartsheet.exceptions.ApiError): - err = e.error.result - code = 404 if err.code == 1006 else err.code - reason = f"{err.name}: {code} - {err.message} | Check your spreadsheet ID." 
- else: - reason = str(e) - logger.error(reason) - return AirbyteConnectionStatus(status=Status.FAILED) - - def discover(self, logger: AirbyteLogger, config: json) -> AirbyteCatalog: - access_token = config["access_token"] - spreadsheet_id = config["spreadsheet_id"] - streams = [] - - smartsheet_client = smartsheet.Smartsheet(access_token) - try: - sheet = smartsheet_client.Sheets.get_sheet(spreadsheet_id) - sheet = json.loads(str(sheet)) # make it subscriptable - sheet_json_schema = get_json_schema(sheet["columns"]) - logger.info(f"Running discovery on sheet: {sheet['name']} with {spreadsheet_id}") - - stream = AirbyteStream(name=sheet["name"], json_schema=sheet_json_schema) - stream.supported_sync_modes = ["full_refresh"] - streams.append(stream) - - except Exception as e: - raise Exception(f"Could not run discovery: {str(e)}") - - return AirbyteCatalog(streams=streams) - - def read( - self, logger: AirbyteLogger, config: json, catalog: ConfiguredAirbyteCatalog, state: Dict[str, any] - ) -> Generator[AirbyteMessage, None, None]: - - access_token = config["access_token"] - spreadsheet_id = config["spreadsheet_id"] - smartsheet_client = smartsheet.Smartsheet(access_token) - - for configured_stream in catalog.streams: - stream = configured_stream.stream - try: - sheet = smartsheet_client.Sheets.get_sheet(spreadsheet_id) - sheet = json.loads(str(sheet)) # make it subscriptable - logger.info(f"Starting syncing spreadsheet {sheet['name']}") - logger.info(f"Row count: {sheet['totalRowCount']}") - - for row in sheet["rows"]: - try: - record = construct_record(sheet["columns"], row["cells"]) - yield AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage(stream=stream.name, data=record, emitted_at=int(datetime.now().timestamp()) * 1000), - ) - except Exception as e: - logger.error(f"Unable to encode row into an AirbyteMessage with the following error: {e}") - - except Exception as e: - logger.error(f"Could not read smartsheet: {stream.name}") - raise e - logger.info(f"Finished syncing spreadsheet with ID: {spreadsheet_id}") + def streams(self, config: Mapping[str, Any]) -> List["Stream"]: + sheet = SmartSheetAPIWrapper(config) + return [SmartsheetStream(sheet, config)] diff --git a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json index 57876a9a81a15f..93c5d422ea2366 100644 --- a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json +++ b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json @@ -17,6 +17,14 @@ "title": "Sheet ID", "description": "The spreadsheet ID. Find in the spreadsheet menu: File > Properties", "type": "string" + }, + "start_datetime": { + "title": "Start Datetime", + "type": "string", + "examples": ["2000-01-01T13:00:00", "2000-01-01T13:00:00-07:00"], + "description": "ISO 8601, for instance: `YYYY-MM-DDTHH:MM:SS`, `YYYY-MM-DDTHH:MM:SS+HH:MM`", + "format": "date-time", + "default": "2020-01-01T00:00:00+00:00" } } }, diff --git a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/streams.py b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/streams.py new file mode 100644 index 00000000000000..5a589392376092 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/streams.py @@ -0,0 +1,58 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
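+#
+# streams.py: SmartsheetStream implements incremental sync over one sheet.
+# The cursor is the record-level "modifiedAt" timestamp: state starts from
+# the configured start_datetime (default 2020-01-01T00:00:00+00:00) and is
+# advanced after every yielded record to max(previous cursor, record's
+# modifiedAt), so an interrupted sync resumes from the newest row already
+# seen, e.g. an (illustrative) state of {"modifiedAt": "2022-04-15T10:00:00+00:00"}.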
+# + +import datetime +from typing import Any, Dict, Iterable, List, Mapping + +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.streams import Stream +from source_smartsheets.sheet import SmartSheetAPIWrapper + + +class SmartsheetStream(Stream): + cursor_field = "modifiedAt" + + def __init__(self, smartsheet: SmartSheetAPIWrapper, config: Mapping[str, Any]): + self.smartsheet = smartsheet + self._state = {} + self._config = config + self._start_datetime = self._config.get("start_datetime") or "2020-01-01T00:00:00+00:00" + + @property + def primary_key(self) -> str: + return self.smartsheet.primary_key + + def get_json_schema(self) -> Dict[str, Any]: + return self.smartsheet.json_schema + + @property + def name(self) -> str: + return self.smartsheet.name + + @property + def state(self) -> Mapping[str, Any]: + if not self._state: + self._state = {self.cursor_field: self._start_datetime} + return self._state + + @state.setter + def state(self, value: Mapping[str, Any]): + self._state = value + + def read_records( + self, + sync_mode: SyncMode, + cursor_field: List[str] = None, + stream_slice: Mapping[str, Any] = None, + stream_state: Mapping[str, Any] = None, + ) -> Iterable[Mapping[str, Any]]: + def iso_dt(src): + return datetime.datetime.fromisoformat(src) + + for record in self.smartsheet.read_records(self.state[self.cursor_field]): + current_cursor_value = iso_dt(self.state[self.cursor_field]) + latest_cursor_value = iso_dt(record[self.cursor_field]) + new_cursor_value = max(latest_cursor_value, current_cursor_value) + self.state = {self.cursor_field: new_cursor_value.isoformat("T", "seconds")} + yield record diff --git a/airbyte-integrations/connectors/source-smartsheets/unit_tests/conftest.py b/airbyte-integrations/connectors/source-smartsheets/unit_tests/conftest.py new file mode 100644 index 00000000000000..e168f2fe831c51 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/unit_tests/conftest.py @@ -0,0 +1,34 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
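+#
+# Shared pytest fixtures: response_mock loads the canned get_sheet payload
+# from response.json, config supplies minimal credentials, and
+# get_sheet_mocker patches SmartSheetAPIWrapper._get_sheet so tests never
+# call the Smartsheet API. Typical (illustrative) use inside a test:
+#     sheet = SmartSheetAPIWrapper(config)
+#     get_sheet_mock, sheet_obj = get_sheet_mocker(sheet)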
+# + +import json +from pathlib import Path +from unittest.mock import Mock + +import pytest +from smartsheet.models import Sheet + +HERE = Path(__file__).parent.absolute() + + +@pytest.fixture +def response_mock(): + with open(HERE / "response.json") as json_file: + return json.loads(json_file.read()) + + +@pytest.fixture +def config(): + return {"spreadsheet_id": "id", "access_token": "token"} + + +@pytest.fixture +def get_sheet_mocker(mocker, response_mock): + def _mocker(api_wrapper, data=None): + sheet_obj = Sheet(props=response_mock, base_obj=api_wrapper) + get_sheet_mock = Mock(return_value=sheet_obj) + mocker.patch.object(api_wrapper, "_get_sheet", data or get_sheet_mock) + return get_sheet_mock, sheet_obj + + return _mocker diff --git a/airbyte-integrations/connectors/source-smartsheets/unit_tests/response.json b/airbyte-integrations/connectors/source-smartsheets/unit_tests/response.json new file mode 100644 index 00000000000000..99e8122ceef2df --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/unit_tests/response.json @@ -0,0 +1,251 @@ +{ + "accessLevel": "OWNER", + "columns": [ + { + "id": 1101932201830276, + "index": 0, + "primary": true, + "title": "id", + "type": "TEXT_NUMBER", + "validation": false, + "version": 0, + "width": 51 + }, + { + "id": 5605531829200772, + "index": 1, + "title": "first_name", + "type": "TEXT_NUMBER", + "validation": false, + "version": 0, + "width": 275 + }, + { + "id": 3353732015515524, + "index": 2, + "title": "last_name", + "type": "TEXT_NUMBER", + "validation": false, + "version": 0, + "width": 224 + }, + { + "id": 7857331642886020, + "index": 3, + "title": "email", + "type": "TEXT_NUMBER", + "validation": false, + "version": 0, + "width": 231 + }, + { + "id": 2227832108672900, + "index": 4, + "options": [ + "Agender", + "Bigender", + "Female", + "Genderfluid", + "Genderqueer", + "Male", + "Non-binary", + "Polygender" + ], + "title": "gender", + "type": "PICKLIST", + "validation": false, + "version": 0, + "width": 193 + }, + { + "id": 6731431736043396, + "index": 5, + "title": "ip_address", + "type": "TEXT_NUMBER", + "validation": false, + "version": 0, + "width": 206 + }, + { + "id": 4479631922358148, + "index": 6, + "title": "dob", + "type": "DATE", + "validation": false, + "version": 0, + "width": 201 + } + ], + "createdAt": "2021-08-27T11:36:41+00:00", + "dependenciesEnabled": false, + "effectiveAttachmentOptions": [ + "GOOGLE_DRIVE", + "DROPBOX", + "ONEDRIVE", + "LINK", + "EVERNOTE", + "BOX_COM", + "FILE", + "EGNYTE" + ], + "ganttEnabled": false, + "hasSummaryFields": false, + "id": 679252988323716, + "modifiedAt": "2022-04-13T06:50:10+00:00", + "name": "aws_s3_sample", + "permalink": "https://app.smartsheet.com/sheets/v7vHw7qHJChcvfHQ8j3xJpG8H82Fh39Rc9PRGvQ1", + "resourceManagementEnabled": false, + "rows": [ + { + "cells": [ + { "columnId": 1101932201830276, "displayValue": "1", "value": 1.0 }, + { + "columnId": 5605531829200772, + "displayValue": "Joni", + "value": "Joni" + }, + { + "columnId": 3353732015515524, + "displayValue": "Watling", + "value": "Watling" + }, + { + "columnId": 7857331642886020, + "displayValue": "jwatling0@amazonaws.com", + "value": "jwatling0@amazonaws.com" + }, + { + "columnId": 2227832108672900, + "displayValue": "Genderqueer", + "value": "Genderqueer" + }, + { + "columnId": 6731431736043396, + "displayValue": "195.50.216.194", + "value": "195.50.216.194" + }, + { "columnId": 4479631922358148, "value": "2020-11-23" } + ], + "createdAt": "2021-08-27T11:36:41+00:00", + "expanded": true, + 
"id": 3201922565072772, + "modifiedAt": "2021-08-27T11:36:41+00:00", + "rowNumber": 1 + }, + { + "cells": [ + { "columnId": 1101932201830276, "displayValue": "2", "value": 2.0 }, + { + "columnId": 5605531829200772, + "displayValue": "Bernardo", + "value": "Bernardo" + }, + { + "columnId": 3353732015515524, + "displayValue": "Klaaassen", + "value": "Klaaassen" + }, + { + "columnId": 7857331642886020, + "displayValue": "bklaaassen1@cbc.ca", + "value": "bklaaassen1@cbc.ca" + }, + { + "columnId": 2227832108672900, + "displayValue": "Polygender", + "value": "Polygender" + }, + { + "columnId": 6731431736043396, + "displayValue": "116.208.253.97", + "value": "116.208.253.97" + }, + { "columnId": 4479631922358148, "value": "2020-02-22" } + ], + "createdAt": "2021-08-27T11:36:41+00:00", + "expanded": true, + "id": 7705522192443268, + "modifiedAt": "2021-08-27T11:36:41+00:00", + "rowNumber": 2, + "siblingId": 3201922565072772 + }, + { + "cells": [ + { "columnId": 1101932201830276, "displayValue": "3", "value": 3.0 }, + { + "columnId": 5605531829200772, + "displayValue": "Drake", + "value": "Drake" + }, + { + "columnId": 3353732015515524, + "displayValue": "Bednell", + "value": "Bednell" + }, + { + "columnId": 7857331642886020, + "displayValue": "dbednell2@theguardian.com", + "value": "dbednell2@theguardian.com" + }, + { + "columnId": 2227832108672900, + "displayValue": "Non-binary", + "value": "Non-binary" + }, + { + "columnId": 6731431736043396, + "displayValue": "120.15.24.132", + "value": "120.15.24.132" + }, + { "columnId": 4479631922358148, "value": "2020-08-21" } + ], + "createdAt": "2021-08-27T11:36:41+00:00", + "expanded": true, + "id": 2076022658230148, + "modifiedAt": "2021-08-27T11:36:41+00:00", + "rowNumber": 3, + "siblingId": 7705522192443268 + }, + { + "cells": [ + { "columnId": 1101932201830276, "displayValue": "4", "value": 4.0 }, + { + "columnId": 5605531829200772, + "displayValue": "Alfreda", + "value": "Alfreda" + }, + { + "columnId": 3353732015515524, + "displayValue": "Brumbye", + "value": "Brumbye" + }, + { + "columnId": 7857331642886020, + "displayValue": "abrumbye3@howstuffworks.com", + "value": "abrumbye3@howstuffworks.com" + }, + { + "columnId": 2227832108672900, + "displayValue": "Genderqueer", + "value": "Genderqueer" + }, + { + "columnId": 6731431736043396, + "displayValue": "64.22.217.122", + "value": "64.22.217.122" + }, + { "columnId": 4479631922358148, "value": "2020-12-29" } + ], + "createdAt": "2021-08-27T11:36:41+00:00", + "expanded": true, + "id": 6579622285600644, + "modifiedAt": "2021-08-27T11:36:41+00:00", + "rowNumber": 4, + "siblingId": 2076022658230148 + } + ], + "totalRowCount": 100, + "userPermissions": { "summaryPermissions": "ADMIN" }, + "userSettings": { "criticalPathEnabled": false, "displaySummaryTasks": true }, + "version": 9 +} diff --git a/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_sheets.py b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_sheets.py new file mode 100644 index 00000000000000..662c7b24882b50 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_sheets.py @@ -0,0 +1,119 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + +import logging +from itertools import permutations +from unittest.mock import ANY, Mock + +import pytest +from smartsheet.exceptions import ApiError +from source_smartsheets.sheet import SmartSheetAPIWrapper + + +def test_fetch_sheet(config, get_sheet_mocker): + spreadsheet_id = config["spreadsheet_id"] + sheet = SmartSheetAPIWrapper(config) + mock, resp = get_sheet_mocker(sheet) + + sheet._fetch_sheet() + mock.assert_called_once_with(spreadsheet_id, rows_modified_since=None, page_size=1) + assert sheet.data == resp + + sheet._fetch_sheet(from_dt="2022-03-04T00:00:00Z") + mock.assert_called_with(spreadsheet_id, rows_modified_since="2022-03-04T00:00:00Z") + assert sheet.data == resp + + +def test_properties(config, get_sheet_mocker): + sheet = SmartSheetAPIWrapper(config) + _, resp = get_sheet_mocker(sheet) + assert sheet.data == resp + assert sheet.name == "aws_s3_sample" + assert sheet.row_count == 4 + assert sheet.primary_key == "id" + + +@pytest.mark.parametrize( + ("column_type", "expected_schema"), + ( + ("TEXT_NUMBER", {"type": "string"}), + ("DATE", {"type": "string", "format": "date"}), + ("DATETIME", {"type": "string", "format": "date-time"}), + ("DURATION", {"type": "string"}), + ), +) +def test_column_types(config, column_type, expected_schema): + sheet = SmartSheetAPIWrapper(config) + assert sheet._column_to_property(column_type) == expected_schema + + +def test_json_schema(config, get_sheet_mocker): + sheet = SmartSheetAPIWrapper(config) + _ = get_sheet_mocker(sheet) + json_schema = sheet.json_schema + assert json_schema["$schema"] == "http://json-schema.org/draft-07/schema#" + assert json_schema["type"] == "object" + assert "properties" in json_schema + assert "modifiedAt" in json_schema["properties"] + + +def _make_api_error(code, message, name): + result_mock = Mock(code=code, message=message) + result_mock.name = name + return ApiError(error=Mock(result=result_mock)) + + +@pytest.mark.parametrize( + ("side_effect", "expected_error"), + ( + (Exception("Internal Server Error"), "Internal Server Error"), + ( + _make_api_error(code=1006, message="Resource not found", name="Not Found"), + "Not Found: 404 - Resource not found | Check your spreadsheet ID.", + ), + ( + _make_api_error(code=4003, message="Too many requests", name="Limit reached"), + "Limit reached: 4003 - Too many requests | Check your spreadsheet ID.", + ), + ), +) +def test_check_connection_fail(mocker, config, side_effect, expected_error): + sheet = SmartSheetAPIWrapper(config) + with mocker.patch.object(sheet, "_get_sheet", side_effect=side_effect): + status, error = sheet.check_connection(logger=logging.getLogger()) + assert error == expected_error + assert status is False + + +def test_check_connection_success(mocker, config): + sheet = SmartSheetAPIWrapper(config) + with mocker.patch.object(sheet, "_get_sheet"): + status, error = sheet.check_connection(logger=logging.getLogger()) + assert error is None + assert status is True + + +_columns = [ + Mock(id="1101932201830276", title="id", type="TEXT_NUMBER"), + Mock(id="5605531829200772", title="first_name", type="TEXT_NUMBER"), + Mock(id="3353732015515524", title="last_name", type="TEXT_NUMBER"), +] + + +_cells = [ + Mock(column_id="1101932201830276", value="11"), + Mock(column_id="5605531829200772", value="Leonardo"), + Mock(column_id="3353732015515524", value="Dicaprio"), +] + + +@pytest.mark.parametrize(("row", "columns"), (*((perm, _columns) for perm in permutations(_cells)), ([], _columns), ([], []))) +def 
test_different_cell_order_produces_same_result(get_sheet_mocker, config, row, columns): + sheet = SmartSheetAPIWrapper(config) + sheet_mock = Mock(rows=[Mock(cells=row)] if row else [], columns=columns) + get_sheet_mocker(sheet, data=Mock(return_value=sheet_mock)) + + records = sheet.read_records(from_dt="2020-01-01T00:00:00Z") + expected_records = [] if not row else [{"id": "11", "first_name": "Leonardo", "last_name": "Dicaprio", "modifiedAt": ANY}] + assert list(records) == expected_records diff --git a/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_source.py b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_source.py index 09bfcec10c48be..4b71c4d0ceddba 100644 --- a/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_source.py @@ -2,45 +2,23 @@ # Copyright (c) 2021 Airbyte, Inc., all rights reserved. # -import json -from itertools import permutations +import logging from unittest.mock import Mock import pytest from source_smartsheets.source import SourceSmartsheets +from source_smartsheets.streams import SmartsheetStream -@pytest.fixture -def config(): - return {"access_token": "token", "spreadsheet_id": "id"} - - -@pytest.fixture(name="catalog") -def configured_catalog(): - stream_mock = Mock() - stream_mock.name = "test" # cannot be used in __init__ - return Mock(streams=[Mock(stream=stream_mock)]) - - -_columns = [ - {"id": "1101932201830276", "title": "id", "type": "TEXT_NUMBER"}, - {"id": "5605531829200772", "title": "first_name", "type": "TEXT_NUMBER"}, - {"id": "3353732015515524", "title": "last_name", "type": "TEXT_NUMBER"}, -] - - -_cells = [ - {"columnId": "1101932201830276", "value": "11"}, - {"columnId": "5605531829200772", "value": "Leonardo"}, - {"columnId": "3353732015515524", "value": "Dicaprio"}, -] +@pytest.mark.parametrize("connection_status", ((True, None), (False, "Internal Server Error"))) +def test_check_connection(mocker, config, connection_status): + mocker.patch("source_smartsheets.source.SmartSheetAPIWrapper.check_connection", Mock(return_value=connection_status)) + source = SourceSmartsheets() + assert source.check_connection(logger=logging.getLogger(), config=config) == connection_status -@pytest.mark.parametrize(("row", "columns"), (*((perm, _columns) for perm in permutations(_cells)), ([], _columns), ([], []))) -def test_different_cell_order_produces_one_result(mocker, config, catalog, row, columns): - sheet = json.dumps({"name": "test", "totalRowCount": 3, "columns": columns, "rows": [{"cells": row}] if row else []}) - mocker.patch("smartsheet.Smartsheet", Mock(return_value=Mock(Sheets=Mock(get_sheet=Mock(return_value=sheet))))) +def test_streams(config): source = SourceSmartsheets() - records = [message.record.data for message in source.read(logger=Mock(), config=config, catalog=catalog, state={})] - expected_records = [] if not row else [{"id": "11", "first_name": "Leonardo", "last_name": "Dicaprio"}] - assert list(records) == expected_records + streams_iter = iter(source.streams(config)) + assert type(next(streams_iter)) == SmartsheetStream + assert next(streams_iter, None) is None diff --git a/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_streams.py new file mode 100644 index 00000000000000..eec95654047d16 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_streams.py @@ -0,0 +1,21 
@@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + +import datetime +from unittest.mock import Mock + +from airbyte_cdk.models import SyncMode +from source_smartsheets.streams import SmartsheetStream + + +def test_state_saved_after_each_record(config, get_sheet_mocker): + today_dt = datetime.datetime.now(datetime.timezone.utc) + before_yesterday = (today_dt - datetime.timedelta(days=2)).isoformat(timespec="seconds") + today = today_dt.isoformat(timespec="seconds") + record = {"id": "1", "name": "Georgio", "last_name": "Armani", "modifiedAt": today} + stream = SmartsheetStream(Mock(read_records=Mock(return_value=[record])), config) + stream.state = {stream.cursor_field: before_yesterday} + for _ in stream.read_records(SyncMode.incremental): + assert _ == record + assert stream.state == {stream.cursor_field: today} diff --git a/docs/integrations/sources/smartsheets.md b/docs/integrations/sources/smartsheets.md index afdf7682f02d80..595e348d673d21 100644 --- a/docs/integrations/sources/smartsheets.md +++ b/docs/integrations/sources/smartsheets.md @@ -86,7 +86,8 @@ To setup your new Smartsheets source, Airbyte will need: ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:---------------------------------------------------------|:--------------------------| -| 0.1.9 | 2022-04-12 | [11911](https://github.com/airbytehq/airbyte/pull/11911) | Bugfix: scrambled columns | -| 0.1.8 | 2022-02-04 | [9792](https://github.com/airbytehq/airbyte/pull/9792) | Added oauth support | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------| +| 0.1.10 | 2022-04-15 | [12077](https://github.com/airbytehq/airbyte/pull/12077) | Implement incremental read and improve code test coverage | +| 0.1.9 | 2022-04-12 | [11911](https://github.com/airbytehq/airbyte/pull/11911) | Bugfix: scrambled columns | +| 0.1.8 | 2022-02-04 | [9792](https://github.com/airbytehq/airbyte/pull/9792) | Added oauth support | From a33cbf6b186b32e03548bc82f7cb715b68b87f88 Mon Sep 17 00:00:00 2001 From: Edmundo Ruiz Ghanem <168664+edmundito@users.noreply.github.com> Date: Wed, 27 Apr 2022 14:57:12 -0400 Subject: [PATCH 012/152] Update empty states for connections, sources, and destinations (#12320) * Update empty connections page with new empty state * Add EmptyListPage component * Update src/dest/conn CTAs to capitalize the word New * Update empty states for sources and destinations pages * Rename EmptyListPage -> EmptyResourceListPage component Add data-id to EmptyListResourcePage button * Update EmptyResourceList prop from enable create button to disable * EmptyResourceListPage -> EmptyResourceListState * EmptyResourceListState -> EmptyResourceListView --- airbyte-webapp/public/images/bowtie-half.svg | 31 ++++++ .../images/octavia/empty-connections.png | Bin 0 -> 18118 bytes .../images/octavia/empty-destinations.png | Bin 0 -> 17703 bytes .../public/images/octavia/empty-sources.png | Bin 0 -> 17554 bytes .../EmptyResourceListView.tsx | 96 ++++++++++++++++++ .../components/EmptyResourceListView/index.ts | 1 + .../src/components/base/Button/Button.tsx | 8 +- .../components/base/Button/LoadingButton.tsx | 8 +- .../src/components/base/Button/types.tsx | 4 +- airbyte-webapp/src/locales/en.json | 9 +- .../AllConnectionsPage/AllConnectionsPage.tsx | 46 +++++---- .../AllDestinationsPage.tsx | 14 ++- .../pages/AllSourcesPage/AllSourcesPage.tsx | 8 +- 
.../Sections/auth/GoogleAuthButton.tsx | 4 +- 14 files changed, 182 insertions(+), 47 deletions(-) create mode 100644 airbyte-webapp/public/images/bowtie-half.svg create mode 100644 airbyte-webapp/public/images/octavia/empty-connections.png create mode 100644 airbyte-webapp/public/images/octavia/empty-destinations.png create mode 100644 airbyte-webapp/public/images/octavia/empty-sources.png create mode 100644 airbyte-webapp/src/components/EmptyResourceListView/EmptyResourceListView.tsx create mode 100644 airbyte-webapp/src/components/EmptyResourceListView/index.ts diff --git a/airbyte-webapp/public/images/bowtie-half.svg b/airbyte-webapp/public/images/bowtie-half.svg new file mode 100644 index 00000000000000..48372107abbd2d --- /dev/null +++ b/airbyte-webapp/public/images/bowtie-half.svg @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/airbyte-webapp/public/images/octavia/empty-connections.png b/airbyte-webapp/public/images/octavia/empty-connections.png new file mode 100644 index 0000000000000000000000000000000000000000..e40446edb42d8bb79278e9a2e24bbcabcae1e849 GIT binary patch literal 18118 zcmV)_K!3l9P)d0p80vzl6jLdo5=Z3% zm1-*aRMM$bP^p3ojRF^8ffy=%AQ6)Z*q%yP!!ds<5vcv0^oDJ{VPB`HW0Gc2DW#GN ziPXEPoHCTnR4U;@gK(u0mL+93D&bUar!oc-Q7^{tP);R{$~G$BQ8{ebcfkb{u#~)k z%C}UKAxT8Li-()1KE%JdKEO9rA0FIV-!`JN-pj*BAM78l_w?}A>vV2Y6IJQyu?w6;YgiRlNBk(Ywx3rBDvFBzxO_93ev4TUAS zmOi==lH{FFWhIp&xS)y4jj)5LOs4V}l^{zEJGAO6h}o)HdvtEyAD!9`0!dJiw1iIQ zxSvI@*Ata|WufFyZ8GHkn`z~w^Nfm9x+a;K$6 zF_GOctox0KBPlwp+f-aiq-CE%()lCUbZ{{?AN)y3ZWUPPT}7Iy5grtUsY4&fh#t2gBBZTDrKh8iUy_aG+ddJJ_i)ly^C&9zr6HOUBU|X>2MQlCv3(BiXl!w?k z!Ip*XxB}5s2-}9rbC7h@5cR=MZTe&8)K5v8_H)WEzacoAaTK4fcoO?gu2EL!LF!>x zO{m5dgr@Lxhf?{8N=x;DZqY+=$LJT)zte=vj;KoL$!xZ2_k4V{`d`Q?Jf}Rwo;23u zPv8oHGe6}PkR-@$P#M;Tb-w{q20w;g?M4dQ=aquW>I$s*>r>2I^*k!8%H`@DDi1?? zsV{ptD`9(3;Za8`^?{qOehLpxd{-FsuT&(Y?#AqeHzOnWtX#!03a?Um7ngawg0nq! 
z7MIhhETz&+d7x|b5WH~bB1|0cpfFcm$p{Z>g%MZXf^#{ENXZnw1%8ek7s0_h{GwmB z(PGX-SZ3iINP`J)xl%{}CJlTD|G0GlT8DRp3ljXuO+2#4t#Bu6NPOZ(x%w(9J*h0m zWh!Z%MI~Tq`URvQ6(b@;qw(xsu}&#^N%x*E$1X z+2_x`^S;W;F42SW(w&PD5!?nYSVuzY9=!6^6lB+05jH}yAY4`Lv9IP6aN6GdfgrWJwd9Am-bfy+kHUP=ke{!+Gv4^>u99`rCC zpZeKlZC7nfBwaX+S^ph{{E{rWY73R|kY++Od0Z+9%aU~&m637<6ZD>IX5;<||87zy zD|vp5wKkTZoM)qTO-skr2nmQlkGK)obnr*nRvQgzro}qc&P!{uxD?D_ot4ApCdd^` z*89f)TX?P1CMy=S&g>h|B9D6FS3|F_CbTv3h6lGsyB0mL@6;NURfsqQR=HS6tVR>Z zrRaA#c;#QpifN->z(c0(niDp5&VSUe3OsvBSyFD%W)-V=Xf^Pwg5KsTVU zB!@`pMh{h@9*6kJ@b?Zvx7cC0W%w*~kGY!Kd6*E?QBu^=s3o%=KWuml^H;qfA7T^2 zFe-^?qPUoZW&6}7DzS1!Y>TV#?%&rT*uS~hclQF?5~aiABnhiVQ6_NY57Z0U1!-8Z z{WGjR@D&COXpLcmyJ0||4rmb>3O`?Ol$BMAU*p{Q92`A<7CZMOV8zOv$jdJl=XY$~ z4^sv|igA7JfzGW74G8H4{QS44@ypf^<(gxVrVwmTW{pVJW(ii}=Z#zd&?ES2zkhIN~tE^sLh;r~CEu4MwYG9ndPggG1taOUeo` z^V31}8OP+Rm#DmnCWwngSe}>klDy;Q=7G77ZWmIE;Z0qC*YOf*&4JZ{150R_YwI|d zB+GBR{)cbYyaNBgYW&{^k7DBZ{-`fbC8lH6%k#0E$Qu^al3c}Kk=^qe)E8wH#aOq0 zAvPRXgkxuSD5I~8@Sz{1SNky-MQT&exX}%=OP@J^7|+kU1~~=7vA{93?7}^QCWeba zSRSGBL?xq=I=rb&9kTB>m49WR;&gzYiHpKc9NCn|G)m-E##`) zidj$Ggjb)NCh44Rc2SVPip zq|N7UieHS6md${VPZgFfd=Xa<>1MWVWn~rO4=3UMPkzC#t9GKOxC{}bN#UKqLMa7hH$KXSU$A7w*E#&)!aCB^}U*-Uwe(pFP5ftlo68yHuFOo-OHu6(pAP zVJkYeZ-p+MT4T}An{Xm^HzxGIAMS3}lee&B`(fi-$S631-&2a{7f~(4aMSfeaWj>av@GmDd=%^UeTVX@QuK}+W4CnYNtx^REksGV zkldZAe2ykG&89I4%ia}^z>1P9IHr~7UNJn#BG7&W-GGHL#f1bKXzi4?)g)DCWMf<7 zzrLO#$U5us8wFu??nKCd0Yr#a)X2Q|LGKZQ)vHz@HSGc-!b9NcX`OtXI&0??uLx>Ox8SE6^+~ z2)Exf9I-Jiuz?1G`w}*i>yyP07E9GoRmt- z+k?JgFswnR^TLk!GK`w^JgFzsaNlDeQCDo;L-E@4ccL>9cg6Nkke+Q>XW4sVHNN;` z7G8Po4%{$luq5662TBB0zKS|}gZ*ecvFhj9XdM-fmAk*h++`0#Ut^!g zm6M4K>1s?QGMh>VG!ZnWgk{|*THfi^el(_!dsS?c`ihuA+O}2(>#K>|YAph?dy)m1 zPN%Gp_Ol97@aDqX(7t^neqHnm>4)^NJxN#fw*5IIBqZRs-+mJVs{;oUG4I>eBsps^ zd`NeaVA_(?)6-oD?iDMyqk_6sH~IFs3iQ`fd%5C-0^2>n|o`+5^rPww6=_dfoZUwWjhTngAM0!m?#xnDImC z0nC_eG+Yb4)x%7Bw#<{#Ad3gGbqJ~f)(gM*{aMsd&-T@4Pa`nUkTk=~He0KVJ$v?; z>{nJ+WA@vNFyooeES`7&-Q$JhVe`Qy!c?V&C7a(v5}B3eymAkGeQChw(_Ye7Euf&- zjb0w}lCx{)Hdr+8Ie2?};-^jT;zDjR>_N|Vqew+FMs>2d?SdwN#v<&CP*7}apSuwq z*;RDiUi6%rx7R+YIGgN+?6|cd1WM-c?u0+cmiYt5j_!ql{R~rRSVFn`!5-}0yH~UR ze_yS_f9J0>d!7#wX7Io+C@#wr_Ea@W%L?)5p84o~RXa?XKvI?TWq8HF8eaYPJX|&O zQM~c)4=5?o4riS*aUhfD}CZL_|8#~7n&oPWwY&;-yJgyrsK zqOxMxRW})bD7Fk}jem-Bfa7ZoKd1&-l`4T!>i)TBfjEBuT}mT9>l!v?*c4d2crgwf zIAHepqakCoPdE{u9KTQ1H}N@ zS+RSLfL3Jzv3t90Fs7Rlq~C23SFSd_0nq zYrFXLbJ>`D`)ursKV7FS>+3H+J5AjG*K4F8IOA``J1XL;F|O}D#!EqRBseu1gRs4o zJ0pAE4sTDXdnTU&yfpH%C6oGClwn#Y|L7*$I#VgT=foPJH*(i#Vv5p}?pIR*Td-ik z0wH0oVbPLJC@n45JjRv+cQP%Nl5JDj4fCYH<0(7Qi`0RZEkY%Sh0eOk>|g)-SDhCq zhdf@_-SVn27)Oa~$Mr>I^AH?M-l2>aF!{ae=mmx{u=8XD`59-_a~gebI58YeSaK!R z&U|G(X=KmZmo`8{xUedarLqW_NL0m-?xrHFG|B}gjP0YkDm{scBv`}z`SbDjzyIB$ zbd|wUE+su%^H>FWq*w|Ul$suoPn5doTc#SupW-Dl|58~0vK(wN08R`?6PDR&9R3FSg(0Rztt%@?!v&sp zK(YwPFiKSoiORaKC}LYLM&3L^@7z&iW^Kh;$?d8^ko^37+^HF`Tyz&a3efC*c!otNHP+nfw{Xb=* z%uDzjc&E#mg?bgl6)Bq{5(aP(pjy_7rcqxu-VKkSSNj4*9Xo>b(e*Y2B; z6{*#S{eQPicE3jL6E))Ob}&!1LZt*sYV zR7eDB-Mq8O`;nEG24B)K+qRaw7Cqj4<25|==<{gayt%R6`t|Ga@y8!y`SRtk238HW zY(FgN#0m@^&{@2^nfa;0ViDvQZv3sZIuJf4*K=@sNI)c68+gevk$YFEL^u^3Nmw45 zoAk}&`c5})`-awa!(KQcshdh@kyULS;6J&B`S;#Fo+8T094@4v!mF=;1V;z2o3a8Gf*Zi=hp4laXhJ5--xdPX<4XDI3XNK zSWbu8sdiU~b}efKu3wmG<7yheuOC4YLdFS)s?n^{?eKoVI*BWED)Aw6?{Gq z4Mr3ZqD#ltb?R9a_;bBv9f(z^KUFoAB(X9O8)qv0^YV(2m0f^2bLODF(9*fNCkqdi z<`kvSz^iYEF^Ur;f0p2_>Qq^jj_Ngy2+J!&3C~wFoRqMR)M42yV$_bJ!s5_6yw)`u ztkPdu6{s={=Q6Z(; zx2jEE!=%#dtA$inx^^QXLeWt8`}%0s$3#m%GUt$0jsc*nt8N)#*zeQ+8d4*SQsvUF zSYiQ=B}->!ZP5dAS>H)`LCCUhiILZQ-H!E^Eo0S 
zi6Bou+OXfB3IEb8?Xk$}_4~!&aeeO<>v+j1D$7N2X+APC^T-iUE7pb%Y`6!sU99%Z zgNkx#09iubHTfksack>p9Wy3gFwxcx%|ncX*M4wPIFhi}DtFY&_;@SSze-ZqiVTTK z)Y27dGnSjFbd^aP7Ss$u{-L6KO*3O|lELoF@Mad5?XrmHpS~3x;_5nq*e1%s)lBH? z2K?RR7~7utg{`crHtE8rTssI20f#lRT~`Y}KGGn;&nKX^XVTR*69UlmeD`c;lsO!z zt1ZSaF&sr${{AL5fFS=`qbvVNo(k7~K8=^E$`og+FQy`{lDxHQfg1*P9WBVPYOQu0 zfD93e_tR_e^RxdM3?I@B|9tvp&13&tunNg38Nw{ozLlvhSVuoC2X$v;<|D75*4TgR zjlH5z${6E{qPi!`f3>8n30W0tLhlaJ%p0DLZ4Lcs@U`jpw-D3T_W4i`-a8(v zm%dH|yt>wbBgfC;r8mD7lJvd_Z_HcD zj(qv?<7TDn(PPOtN9HbeUbJi3Mbo-sVu{hX2G6(i_Y#7Yb&Hm8LO9B-#luUJk$|UX z?SPAQPNNfu?5@RmrY9?>y2u;KA5RyC3yeSWf;)Aqp-_bLd(cd z5tquO-M;fEcJ4bVBp@63JCWIH2pQn}^lWeT{F0J#JoMxzxIk`F&K+>Wu&3+TH|71# z(Sx!6!1q|P;x9~}HpZmQBlk}f1Hu{4d`j2Kvv|)pUq1|=zV`@1Lj%pW`El7cp&oUP z?u)4A(IWVr$9l5aN>^hrX&HW7Hh=3C(OV2JPhXJ}A-_cQ2(fU&wZI%jSQb|%HXkqd z+FfO^HpdRsn3-5?jL$IG4b4G0c$j(f8`!k}H!S;YJHGpABksF<9Be^wkRK+D>nDn> z;NX{g9(iBPnr#sli~l^l+2WLIc@VRHQ#^9XL!~C%jgtps%!nRX{>Kh1{9yxj?K^?O z!cus7c_6w?1V#<-fte3quWA2PqL^Ny|IYtiY+rx%eUk1<+iuKCo%CDk4KKSKW+?x> zdYRE+g4YOBoB_Nr%J*pDYGo>JJ&}NG{3qI0I*eqjcCh*U>9%kDR(fV$j&9jbwf6fwhQh*~|Kg^o9=aAw{db|LHGC zP0xa(arkI5-uiHf*nVW{s|Y6#jhjMW)f#NUOz*|!xj;Xob;tfu;YEO>2+Qs!la9We z7%KCP^{Hg?US=jz_oemB^a_`uK>{dASyoYl{NhTKlvj%|KB1q z75MOT>%lGd;M|2A+;Yzxp-;y~b;fOD9}#3#n*;&VL90@wl8?zKDK|!>Ipv_sQG{j1 z$AlgE#ikM2nWuqMTMWchOHV@^qvSOZm8EGO5w)7O1AIN<%ib;O*Q%%@r?3)dGs^^- zxm|ETbG-4uB3w849+K!anEm$mh`#D?n7?q1F`CL2l#rhKJ?X-eZhH;=M?Zy0t$BRrlw_!ByXR5@f)6b@k=cDZY@rpPKSri z7el+=%T(!nfoC=N_bcvo{S&?6y zX?j%ISWQ~BRlsmHy_@tSmys%P@_Zq=I(6t26-ee6H#1QeDetzT&zS zm3j>Cd6P(sxoX!!Y(MfV4km8Fw=@d>78{HoA-r#7#OLYU>gXrVjxwSD1Gs+3+7w%D8{R+bAf>#_HYk@zw`R@Yk;6_~PT4=on|a*DIeqX8$26{Wj^WYyQH13o6;C@gt_{oGl*K13z0&GUlkm6F z@){IVcV1pyjm}ZQ@FKz%l~rPYQl7||9osw*5y9R#K_`~NjEm}x))5^=%AxMD!;SOM90qzr9-R1| z2m)WX?`v${b{JjyKZLt(9fc?-Ay&vN9<;t!r#Q zyfb4l+D5iB(f4Z<5>;8$gw|BaNF-fyb`@)Zz(s z6o17afw#H)_9SV_bv)QQoSKIUQdoPm34x!tJ93K3sV*O(eq{7-8H|F`N>Ud}>1EL& zHat-3!VM2aHd^pLPY%p0%0ftBbJ1=3`8eKfu6zmA;6UjGEB|S-lstQ|IMH%wNFcn) z4x5>sPxe?(5s+P`m=e>fc^v*e`Cqth=-uKRS)F3No_if%FL#qcDNmA6?(5~hN-1pk zZFr`_eebjvJprVt`sNcdTp~C!fZHTw{?M5%LW)_`(^OJc-jE~Yc_u3VxU9?akJ_zO zh#(`QvPzH4f(i_b4JVtg8xk{$gudIUMUc3FiG+b^*XZXHEQ$?t4{e6t#yZBBKGQL% z%M`5Hw*b42{)*$NyNQ%Vq?2kbJ)onPGZ4`&I$`SYM=-kg4G0PdHL5@7a>{5>sz+FW zC)rRvQB!S@vT9;#{2pp`AgAzxoYjz}>sdG<97$M{Ob-QQzv6sTmbzA%uXd_yaZ<*&#|ipTf6zc$E6cC9LcSNMLp zS1B&9B5O%C+J=*^PHwA={Bj&Rn-6d5WdeP@(7r{0_;HL57NxyfZ#pPbBPHv&yvgyJ z_GNj#NN^-!SzE7?ugUhjUB`bnOIVXFgvpGoFsQb(RBAL{ybo=D9u#c5K5!u&nat#ybXY<@{4j-?ck z3gL+m((C=m^ru&=1%O%W!RGiMmS0@i z8>LfRM7A-x(V&XWc?`A+97)MX%TQmmGwAcB71i|pD#&K-CWyOi{4CIz2g4mZG|mWT|)i5(K;+ZSWs%rSO~P#Cf?t9 z%(w(Cs{;*K9JO)bXu`6E+r+%J!JZt43uFF#d(9l+}9qipY_2s5ZClCO9z~V~qpOo?%S5Ur||x(S7a~fx!&9JFJhC zvIc#f4O5&<+oO~u0!2kd$ji&a_19l7_5%Y0F=)^r%$P9)u^n6D*DYV-(;xnUG&0fo zdIyX3#bn#EJ>P*d!Q>X6N7A`NI6yYn<7amWN!TpNG?qZA4jHen8jPB}6nfJ-<9T{B z+c3QrgRKL99sZd&`E8y>#qoi=4uTGj2NO0ydF@+UbVip|JPCN(4hXK3_YT9pTPpBkJ^OF?<5G#xuSvODI^j2B5Pp!kmJr=qK~ zVfn$SU)(yKl-_jn^{*#m_i>Tpb2F6@aAG)m(8V}Og}?n|x#B?L`g%!JMOcA(%|N5S z)#5MzZ9_zB)4YpVh*%9yB3xcxE=cR`ZJLs(YuB!#%$zwB>FMcW0goL!b|5L`XPk~d zjjXIJkr|QCkBp3judgqnqoWbpA`}y^?Txs&$rw0rAl`ZBJLsA4H!xo5fXpWA}^NKsfn zB(Ux9Qq1}8W@@M1@|tU|5qfQh4z@3uB*#o>#ZGYDrcIkLYu2nf3x7O$^H&&Cn@qUQ z{oDF*Y_|C6>$X|4aSpy&{Q__DV@8d^haY|@bX!{>lT=OCRjXDB-FU);2`aLZ?n?IG0bQq<>Y=L9 zkV%~dSZkuzbLI9GRpnT*{bRWqPYiQvvN9S&C6KX!%5Zrnm2|l+hnI+?X)-Jhz-(Zj zJ!*zg4<_5?BiDb56Dhk89TbGydf!NH$|Z%O^kLxo4llv^%B= z(^sE9eGnRIKJy?;(#*_E96o$lSR0luU5fMPwO5|;p+Va-w=N{TH`Mm;sUdnTI$~_N z?#i8?Bd<{0BNO#wGyyc0O2FV$cx>V_<}KtP>A5raYJaSQ2x{>qP#~{31Me)ljmE{9 
zcxdQ7WWx+mT}O|zIjPv6dI;-JY=KvxH#&FejJ9pt3VoLKRGwm!o12To#6%o9azw1A z$m!@Ut(oBK9f;}U=is_QkE%YeZr5(h_o%PMg$ITI_6O$#uBzAsO1tnWS6 z2j;H*^QJjCaB?+ovUKGoJDoCETMLaXVHupOlDnH2xkAjNe{45mTJ*5`!0PirZP8=R ztC@QIb>tW9II?lI;;}x`MQaUF+b(GoJ1Osc%obEaGNe*m7oojK+5Abdygz!!=8hVd?B>1ot>A0 zH@}^P@=6h;{eM*6M-#-wBrJpH%y2qCqdYlCCMC{`)vX=mnGeaWJ? z;eew^*pTb|clpCOmAZ>JIqASODB_lz5-v7jIf{!(YZ6qjC*co-1xF#SRbQKlV~aGk zOSiSBje_AuX{^|NB)SrsO&})#lW8{j; z>T(g?FG2tfE6`jll3LR@>r3|fT09)3%NCIPdG=T11!*A#B(b%|_C;--5SNOu9Bi=# zl0(6`z86b@t%rU>NMH-JkFvb>fZon}bsJ4m*h$WY*ofur`kt?;rYOL{1+R3lW_JDu0c|{0e!!@QY-pqPl9;=ey+qVh z5iH=ZLyPg>r4Ps!fmf-_!DR+#A*_t`RCq?*cqkU<%`dt@Zp9TMKR`@WFOy~M?UAI` zj+-Ne<}q9KN!yw=L^BZBuy;P@(;%z7LWBm)royuwoidAdL*Q(LWpEb6pCHXx-~|SR zw5_Nt#Ub*F9XYcJJ>tfQ{Hiu}$@Ofbx6;5CUAWHm?1XI&6g_qxS%OcN+>NTLaz2IC z0FJ`C?8wS+M#3_9;y#m>;}1A>xp8%x?EF;xbyzZewT-;WWa3|a32blI4lCEP1vV2{ zp-(6iJ##hZK=^U(OZaA`$Q8!%g3nU<1eYhWB|Hd_)zBsI@R+Ii#`q-GvR z*O;r}?G>PTazh72jv}Z{AOo#akN?+1r&{^P0(j8WL=CA?GySTrd^~ zPyNYNe^D7laD8|=B{J2gTVsQvl8&#s#!REdR=B!RVh!M|CI_a9>J;rn*gI> zC5w-|Qg?LAKJ=m02nuK+g69K#Ls3SiFkTe7y1D{2HC4ziNJU0YqG16(O=AN~bWUE) z;|nHmJ}we22f`}B5AF#nS+Cy3U0>j|r7gBg+-%_#$=vk7g2&xlV6eGjp5Z8Jj0{p_zhMX;4o&jhO&H#FG;5$vf0$2vZ4g3 zS;vu(v>ER1-k3COmS}Iw12(p?p|QP$v~}9LJ*r2Ylgymx2q~R<;Sd(Aco3&kcgj^9 zI?5RlIjy>Dzxs-!9kF zXgR{{;K&?Igr?rgaXXHgYYQ9O*T>eYak~AIxgKzL_e9T*lZ483{LBWqnjIG`T-Lw= zXljJz-<_QWiy#@vaaG(n%(`m@M)dg`yu3_T4!0^j?ev1#N;azmHZ;&TfIW^Ro})?J zG5Tuwcm)X38tk}(A*pNY(Lgjc!g8D^XDkWgCq8b#qj>C=@5shzoX2R9Z1o|iC1?^; zqtaeNG?YO0NPL?wrg35A3hT;d9MjRh%@ARUIe799xt5(e>})uW2B0YsmQ5vW=M$Fi z+sDnp?brMp76V=`20>LP+H#G8!t?O-^s(89Z|k*fKEE;b0tI4P^&^*ZJ`z)R@GgH@ zo+Zsoh8LioXexx|JVsx^5T!MCz$3Wr8q?VymUK^B>TEi0hdybk4NqZ7CWk;X)s!D= z=b8O1!%3Gq4?&Fw2Vg0v2buz5IoN}Z&@}pQfoG?$M3BFYI;*Y697s-E+VFs6?asMK z&p3+y-ES2^D0ba%1EH@v1AX~+`g@v6BJ|a1(AN~wfntfgg245>HtMRpEt_{i{E6R? 
[binary image data omitted]

diff --git a/airbyte-webapp/public/images/octavia/empty-destinations.png b/airbyte-webapp/public/images/octavia/empty-destinations.png
new file mode 100644
index 0000000000000000000000000000000000000000..c985d0821aded07ff8ac83ca76b48d996ddca749
GIT binary patch
literal 17703
[binary image data omitted]

diff --git a/airbyte-webapp/public/images/octavia/empty-sources.png b/airbyte-webapp/public/images/octavia/empty-sources.png
new file mode 100644
index 0000000000000000000000000000000000000000..460ea9be38875601fd0435365a6aa271b5793e2d
GIT binary patch
literal 17554
[binary image data omitted]
zCr#X=5&P(=iIKN?dQcNI`JA`_B_9+t?$5)E1^a_!elQ#F@qE zcgTwG$-!$rcjR{c)sXOd6yxf3>F-qc5ZTZtmc*R3LBw$!FeP^ie$+1wdD(8*)cXl| Pgb0JDtDnm{r-UW|n~N_X literal 0 HcmV?d00001 diff --git a/airbyte-webapp/src/components/EmptyResourceListView/EmptyResourceListView.tsx b/airbyte-webapp/src/components/EmptyResourceListView/EmptyResourceListView.tsx new file mode 100644 index 00000000000000..b70f0d5518060f --- /dev/null +++ b/airbyte-webapp/src/components/EmptyResourceListView/EmptyResourceListView.tsx @@ -0,0 +1,96 @@ +import { useMemo } from "react"; +import { FormattedMessage } from "react-intl"; +import styled from "styled-components"; + +import { Button, H2 } from "components/base"; + +interface EmptyResourceListViewProps { + resourceType: "connections" | "destinations" | "sources"; + onCreateClick: () => void; + disableCreateButton?: boolean; +} + +const Container = styled.div` + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + height: 100%; +`; + +export const Heading = styled(H2)` + font-weight: 700; + font-size: 24px; + line-height: 29px; + max-width: 386px; + text-align: center; + strong { + color: ${({ theme }) => theme.redColor}; + } +`; + +const IllustrationContainer = styled(Container)` + position: relative; + width: 592px; + height: 276px; + + pointer-events: none; + user-select: none; +`; + +const OctaviaImg = styled.img` + max-height: 203px; + max-width: 100%; + z-index: 1; +`; + +export const BowtieImg = styled.img` + position: absolute; + + &.empty-list-bowtie--right { + right: 0; + transform: scaleX(-1); + } + + &.empty-list-bowtie--left { + left: 0; + } +`; + +export const EmptyResourceListView: React.FC = ({ + resourceType, + onCreateClick, + disableCreateButton, +}) => { + const { headingMessageId, buttonMessageId, singularResourceType } = useMemo(() => { + const singularResourceType = resourceType.substring(0, resourceType.length - 1); + const baseMessageId = resourceType === "connections" ? 
+
+    const headingMessageId = `${baseMessageId}.description`;
+    const buttonMessageId = `${baseMessageId}.new${
+      singularResourceType.substring(0, 1).toUpperCase() + singularResourceType.substring(1)
+    }`;
+
+    return { headingMessageId, buttonMessageId, singularResourceType };
+  }, [resourceType]);
+
+  return (
+    <Container>
+      <Heading>
+        <FormattedMessage id={headingMessageId} />
+      </Heading>
+      <IllustrationContainer>
+        <OctaviaImg src={`/images/octavia/empty-${resourceType}.png`} alt="" />
+        {resourceType !== "destinations" && (
+          <BowtieImg src="/images/bowtie-half.svg" alt="" className="empty-list-bowtie--right" />
+        )}
+        {resourceType !== "sources" && (
+          <BowtieImg src="/images/bowtie-half.svg" alt="" className="empty-list-bowtie--left" />
+        )}
+      </IllustrationContainer>
+      <Button size="xl" onClick={onCreateClick} disabled={disableCreateButton} data-id={`new-${singularResourceType}`}>
+        <FormattedMessage id={buttonMessageId} />
+      </Button>
+    </Container>
+  );
+};
diff --git a/airbyte-webapp/src/components/EmptyResourceListView/index.ts b/airbyte-webapp/src/components/EmptyResourceListView/index.ts
new file mode 100644
index 00000000000000..585fb92a64ce4c
--- /dev/null
+++ b/airbyte-webapp/src/components/EmptyResourceListView/index.ts
@@ -0,0 +1 @@
+export * from "./EmptyResourceListView";
diff --git a/airbyte-webapp/src/components/base/Button/Button.tsx b/airbyte-webapp/src/components/base/Button/Button.tsx
index fb3d2c16388ebf..3e304cce814f40 100644
--- a/airbyte-webapp/src/components/base/Button/Button.tsx
+++ b/airbyte-webapp/src/components/base/Button/Button.tsx
@@ -1,9 +1,9 @@
 import styled from "styled-components";
 
 import { Theme } from "theme";
 
-import { IProps } from "./types";
+import { ButtonProps } from "./types";
 
-type IStyleProps = IProps & { theme: Theme };
+type IStyleProps = ButtonProps & { theme: Theme };
 
 const getBorderColor = (props: IStyleProps) => {
   if ((props.secondary && props.wasActive) || props.iconOnly) {
@@ -96,14 +96,14 @@ const getPadding = (props: IStyleProps) => {
   return "5px 16px";
 };
 
-const Button = styled.button<IProps>`
+const Button = styled.button<ButtonProps>`
   width: ${(props) => (props.full ? "100%" : "auto")};
   display: ${(props) => (props.full ? "block" : "inline-block")};
   border: 1px solid ${(props) => getBorderColor(props)};
   outline: none;
   border-radius: 4px;
   padding: ${(props) => getPadding(props)};
-  font-weight: ${(props) => (props.size === "xl" ? 300 : 500)};
+  font-weight: ${(props) => (props.size === "xl" ? 600 : 500)};
   font-size: ${(props) => getFontSize(props)}px;
   /* TODO: should try to get rid of line-height altogether */
   line-height: ${(props) => (props.size === "xl" ? "initial" : "15px")};
diff --git a/airbyte-webapp/src/components/base/Button/LoadingButton.tsx b/airbyte-webapp/src/components/base/Button/LoadingButton.tsx
index db3250d2fe557c..2c621077d5c459 100644
--- a/airbyte-webapp/src/components/base/Button/LoadingButton.tsx
+++ b/airbyte-webapp/src/components/base/Button/LoadingButton.tsx
@@ -4,7 +4,7 @@ import React from "react";
 import styled, { keyframes } from "styled-components";
 
 import Button from "./Button";
-import { IProps } from "./types";
+import { ButtonProps } from "./types";
 
 export const SpinAnimation = keyframes`
   0% {
@@ -15,7 +15,7 @@
   }
 `;
 
-const SymbolSpinner = styled(FontAwesomeIcon)<IProps>`
+const SymbolSpinner = styled(FontAwesomeIcon)<ButtonProps>`
   display: inline-block;
   font-size: 18px;
   position: absolute;
@@ -25,7 +25,7 @@
   margin: -1px 0 -3px -9px;
 `;
 
-const ButtonView = styled(Button)<IProps>`
+const ButtonView = styled(Button)<ButtonProps>`
   pointer-events: none;
   background: ${({ theme }) => theme.primaryColor25};
   border-color: transparent;
@@ -36,7 +36,7 @@ const Invisible = styled.div`
   color: rgba(255, 255, 255, 0);
 `;
 
-const LoadingButton: React.FC<IProps> = (props) => {
+const LoadingButton: React.FC<ButtonProps> = (props) => {
   if (props.isLoading) {
     return (
diff --git a/airbyte-webapp/src/components/base/Button/types.tsx b/airbyte-webapp/src/components/base/Button/types.tsx
index 39d9464de28a85..63abaa5049eb39 100644
--- a/airbyte-webapp/src/components/base/Button/types.tsx
+++ b/airbyte-webapp/src/components/base/Button/types.tsx
@@ -1,4 +1,4 @@
-export type IProps = {
+export interface ButtonProps extends React.ButtonHTMLAttributes<HTMLButtonElement> {
   full?: boolean;
   danger?: boolean;
   secondary?: boolean;
@@ -7,4 +7,4 @@
   wasActive?: boolean;
   clickable?: boolean;
   size?: "m" | "xl";
-} & React.ButtonHTMLAttributes<HTMLButtonElement>;
+}
diff --git a/airbyte-webapp/src/locales/en.json b/airbyte-webapp/src/locales/en.json
index 2c1378b5b4adb2..7e6de2b13dae93 100644
--- a/airbyte-webapp/src/locales/en.json
+++ b/airbyte-webapp/src/locales/en.json
@@ -232,12 +232,13 @@
   "onboarding.syncFailed": "Your sync is failed. Please try again",
   "onboarding.startAgain": "Your sync was cancelled. You can start it again",
 
+  "sources.description": "Sources are where you want to pull data from.",
   "sources.searchIncremental": "Search cursor value for incremental",
   "sources.incrementalDefault": "{value} (default)",
   "sources.incrementalSourceCursor": "Incremental - source-defined cursor",
   "sources.full_refresh": "Full refresh",
   "sources.incremental": "Incremental - based on...",
-  "sources.newSource": "+ new source",
+  "sources.newSource": "+ New source",
   "sources.newSourceTitle": "New Source",
   "sources.selectSource": "Select a source",
   "sources.status": "Status",
@@ -291,7 +292,8 @@
   "sources.lastAttempt": "Last attempt:",
 
   "destination.destinationSettings": "Destination Settings",
-  "destination.newDestination": "+ new destination",
+  "destinations.newDestination": "+ New destination",
+  "destinations.description": "Destinations are where you send or push your data to.",
   "destinations.noDestinations": "Destination list is empty",
   "destinations.noSources": "No sources yet",
   "destinations.addSourceReplicateData": "Add sources where to replicate data from.",
@@ -309,6 +311,7 @@
   "connection.warningUpdateSchema": "WARNING! Updating the schema will delete all the data for this connection in your destination and start syncing from scratch.",
 
   "connection.title": "Connection",
+  "connection.description": "Connections link Sources to Destinations.",
   "connection.fromTo": "{source} → {destination}",
   "connection.connectionSettings": "Connection settings",
   "connection.testsPassed": "All connection tests passed",
@@ -321,7 +324,7 @@
   "connection.resetData": "Reset your data",
   "connection.updateSchema": "Refresh source schema",
   "connection.updateSchemaText": "WARNING! Updating the schema will delete all the data for this connection in your destination and start syncing from scratch. Are you sure you want to do this?",
-  "connection.newConnection": "+ new connection",
+  "connection.newConnection": "+ New connection",
   "connection.newConnectionTitle": "New connection",
   "connection.noConnections": "Connection list is empty",
   "connection.disabledConnection": "Disabled connection",
diff --git a/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/AllConnectionsPage.tsx b/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/AllConnectionsPage.tsx
index 03f365770065f9..3a402f14589bd6 100644
--- a/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/AllConnectionsPage.tsx
+++ b/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/AllConnectionsPage.tsx
@@ -2,8 +2,8 @@ import React, { Suspense } from "react";
 import { FormattedMessage } from "react-intl";
 
 import { Button, LoadingPage, MainPageWithScroll, PageTitle } from "components";
+import { EmptyResourceListView } from "components/EmptyResourceListView";
 import HeadTitle from "components/HeadTitle";
-import Placeholder, { ResourceTypes } from "components/Placeholder";
 
 import { FeatureItem, useFeatureService } from "hooks/services/Feature";
 import { useConnectionList } from "hooks/services/useConnectionHook";
@@ -19,30 +19,34 @@ const AllConnectionsPage: React.FC = () => {
   const { hasFeature } = useFeatureService();
   const allowCreateConnection = hasFeature(FeatureItem.AllowCreateConnection);
 
-  const onClick = () => push(`${RoutePaths.ConnectionNew}`);
+  const onCreateClick = () => push(`${RoutePaths.ConnectionNew}`);
 
   return (
-    <MainPageWithScroll
-      headTitle={<HeadTitle titles={[{ id: "sidebar.connections" }]} />}
-      pageTitle={
-        <PageTitle
-          title={<FormattedMessage id="sidebar.connections" />}
-          endComponent={
-            <Button onClick={onClick} disabled={!allowCreateConnection}>
-              <FormattedMessage id="connection.newConnection" />
-            </Button>
-          }
-        />
-      }
-    >
-      <Suspense fallback={<LoadingPage />}>
-        {connections.length ? (
+    <Suspense fallback={<LoadingPage />}>
+      {connections.length ? (
+        <MainPageWithScroll
+          headTitle={<HeadTitle titles={[{ id: "sidebar.connections" }]} />}
+          pageTitle={
+            <PageTitle
+              title={<FormattedMessage id="sidebar.connections" />}
+              endComponent={
+                <Button onClick={onCreateClick} disabled={!allowCreateConnection}>
+                  <FormattedMessage id="connection.newConnection" />
+                </Button>
+              }
+            />
+          }
+        >
           <ConnectionsTable connections={connections} />
-        ) : (
-          <Placeholder resource={ResourceTypes.Connections} />
-        )}
-      </Suspense>
-    </MainPageWithScroll>
+        </MainPageWithScroll>
+      ) : (
+        <EmptyResourceListView resourceType="connections" onCreateClick={onCreateClick} disableCreateButton={!allowCreateConnection} />
+      )}
+    </Suspense>
   );
 };
diff --git a/airbyte-webapp/src/pages/DestinationPage/pages/AllDestinationsPage/AllDestinationsPage.tsx b/airbyte-webapp/src/pages/DestinationPage/pages/AllDestinationsPage/AllDestinationsPage.tsx
index 63a2cd1c6ebce1..f7bbd69a248788 100644
--- a/airbyte-webapp/src/pages/DestinationPage/pages/AllDestinationsPage/AllDestinationsPage.tsx
+++ b/airbyte-webapp/src/pages/DestinationPage/pages/AllDestinationsPage/AllDestinationsPage.tsx
@@ -2,9 +2,9 @@ import React from "react";
 import { FormattedMessage } from "react-intl";
 
 import { Button, MainPageWithScroll } from "components";
+import { EmptyResourceListView } from "components/EmptyResourceListView";
 import HeadTitle from "components/HeadTitle";
 import PageTitle from "components/PageTitle";
-import Placeholder, { ResourceTypes } from "components/Placeholder";
 
 import { useDestinationList } from "hooks/services/useDestinationHook";
 import useRouter from "hooks/useRouter";
@@ -18,7 +18,7 @@ const AllDestinationsPage: React.FC = () => {
 
   const onCreateDestination = () => push(`${RoutePaths.DestinationNew}`);
 
-  return (
+  return destinations.length ? (
     <MainPageWithScroll
       headTitle={<HeadTitle titles={[{ id: "admin.destinations" }]} />}
       pageTitle={
@@ -26,18 +26,16 @@ const AllDestinationsPage: React.FC = () => {
           title={<FormattedMessage id="admin.destinations" />}
           endComponent={
             <Button onClick={onCreateDestination} data-id="new-destination">
               <FormattedMessage id="destinations.newDestination" />
             </Button>
           }
         />
       }
     >
-      {destinations.length ? (
-        <DestinationsTable destinations={destinations} />
-      ) : (
-        <Placeholder resource={ResourceTypes.Destinations} />
-      )}
+      <DestinationsTable destinations={destinations} />
     </MainPageWithScroll>
+  ) : (
+    <EmptyResourceListView resourceType="destinations" onCreateClick={onCreateDestination} />
   );
 };
diff --git a/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/AllSourcesPage.tsx b/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/AllSourcesPage.tsx
index 3e47d6f4068c7c..32d75a296dc048 100644
--- a/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/AllSourcesPage.tsx
+++ b/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/AllSourcesPage.tsx
@@ -2,9 +2,9 @@ import React from "react";
 import { FormattedMessage } from "react-intl";
 
 import { Button, MainPageWithScroll } from "components";
+import { EmptyResourceListView } from "components/EmptyResourceListView";
 import HeadTitle from "components/HeadTitle";
 import PageTitle from "components/PageTitle";
-import Placeholder, { ResourceTypes } from "components/Placeholder";
 
 import { useSourceList } from "hooks/services/useSourceHook";
 import useRouter from "hooks/useRouter";
@@ -17,7 +17,7 @@ const AllSourcesPage: React.FC = () => {
   const { sources } = useSourceList();
   const onCreateSource = () => push(`${RoutePaths.SourceNew}`);
 
-  return (
+  return sources.length ? (
     <MainPageWithScroll
       headTitle={<HeadTitle titles={[{ id: "sidebar.sources" }]} />}
       pageTitle={
@@ -31,8 +31,10 @@ const AllSourcesPage: React.FC = () => {
         />
       }
     >
-      {sources.length ? <SourcesTable sources={sources} /> : <Placeholder resource={ResourceTypes.Sources} />}
+      <SourcesTable sources={sources} />
     </MainPageWithScroll>
+  ) : (
+    <EmptyResourceListView resourceType="sources" onCreateClick={onCreateSource} />
   );
 };
diff --git a/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/auth/GoogleAuthButton.tsx b/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/auth/GoogleAuthButton.tsx
index b23bd04d92a31c..75efe97b826d75 100644
--- a/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/auth/GoogleAuthButton.tsx
+++ b/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/auth/GoogleAuthButton.tsx
@@ -1,9 +1,9 @@
 import React from "react";
 import styled from "styled-components";
 
-import { IProps } from "components/base/Button/types";
+import { ButtonProps } from "components/base/Button/types";
 
-const StyledButton = styled.button<IProps>`
+const StyledButton = styled.button<ButtonProps>`
0.36.3-alpha +current_version = 0.36.4-alpha commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? diff --git a/.env b/.env index 428dc71fdc9b4a..7ac8ef4ad13eb1 100644 --- a/.env +++ b/.env @@ -10,7 +10,7 @@ ### SHARED ### -VERSION=0.36.3-alpha +VERSION=0.36.4-alpha # When using the airbyte-db via default docker image CONFIG_ROOT=/data diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index e2753b0957d2ab..68acc05b4ce05c 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-bootloader ENV VERSION ${VERSION} diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index 68ea502c269525..9fb12182b0f063 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -25,7 +25,7 @@ RUN curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packa RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list RUN apt-get update && apt-get install -y kubectl -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-container-orchestrator ENV VERSION=${VERSION} diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index 024bbb22fd344c..2e71cbe0474661 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS metrics-reporter -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-metrics-reporter ENV VERSION ${VERSION} diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile index fd8099512cf769..0337dd7a570d69 100644 --- a/airbyte-scheduler/app/Dockerfile +++ b/airbyte-scheduler/app/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS scheduler -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-scheduler ENV VERSION ${VERSION} diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index 8504029b1386b3..684de4acea2479 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -3,7 +3,7 @@ FROM openjdk:${JDK_VERSION}-slim AS server EXPOSE 8000 -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-server ENV VERSION ${VERSION} diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index e779bd55ab2039..fd32fbc5bdffb7 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.36.3-alpha", + "version": "0.36.4-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.36.3-alpha", + "version": "0.36.4-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^6.1.1", "@fortawesome/free-brands-svg-icons": "^6.1.1", diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index af16a467609a6f..25e460e3dd0438 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.36.3-alpha", + "version": "0.36.4-alpha", "private": true, "engines": { "node": ">=16.0.0" diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile 
index 3300fb0b083cd3..a6c42cec075e8f 100644 --- a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -25,7 +25,7 @@ RUN curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packa RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list RUN apt-get update && apt-get install -y kubectl -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-workers ENV VERSION ${VERSION} diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index 81a2129c32a96c..66e31a7d1fa9d0 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -21,7 +21,7 @@ version: 0.3.1 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "0.36.3-alpha" +appVersion: "0.36.4-alpha" dependencies: - name: common diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index 2638d0a19dee20..41d8fc7fa98ed3 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -31,7 +31,7 @@ Helm charts for Airbyte. | `webapp.replicaCount` | Number of webapp replicas | `1` | | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` | | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` | -| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.36.3-alpha` | +| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.36.4-alpha` | | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` | | `webapp.containerSecurityContext` | Security context for the container | `{}` | | `webapp.livenessProbe.enabled` | Enable livenessProbe on the webapp | `true` | @@ -73,7 +73,7 @@ Helm charts for Airbyte. | `scheduler.replicaCount` | Number of scheduler replicas | `1` | | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` | | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | -| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.36.3-alpha` | +| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.36.4-alpha` | | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | | `scheduler.resources.limits` | The resources limits for the scheduler container | `{}` | | `scheduler.resources.requests` | The requested resources for the scheduler container | `{}` | @@ -120,7 +120,7 @@ Helm charts for Airbyte. | `server.replicaCount` | Number of server replicas | `1` | | `server.image.repository` | The repository to use for the airbyte server image. | `airbyte/server` | | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` | -| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.36.3-alpha` | +| `server.image.tag` | The airbyte server image tag. 
Defaults to the chart's AppVersion | `0.36.4-alpha` | | `server.podAnnotations` | Add extra annotations to the server pod | `{}` | | `server.containerSecurityContext` | Security context for the container | `{}` | | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` | @@ -158,7 +158,7 @@ Helm charts for Airbyte. | `worker.replicaCount` | Number of worker replicas | `1` | | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` | | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` | -| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.36.3-alpha` | +| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.36.4-alpha` | | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` | | `worker.containerSecurityContext` | Security context for the container | `{}` | | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` | @@ -190,7 +190,7 @@ Helm charts for Airbyte. | ----------------------------- | -------------------------------------------------------------------- | -------------------- | | `bootloader.image.repository` | The repository to use for the airbyte bootloader image. | `airbyte/bootloader` | | `bootloader.image.pullPolicy` | the pull policy to use for the airbyte bootloader image | `IfNotPresent` | -| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.36.3-alpha` | +| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.36.4-alpha` | ### Temporal parameters diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index 4a19a4c4b0a25b..25a245a34229b7 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -43,7 +43,7 @@ webapp: image: repository: airbyte/webapp pullPolicy: IfNotPresent - tag: 0.36.3-alpha + tag: 0.36.4-alpha ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s) ## @@ -209,7 +209,7 @@ scheduler: image: repository: airbyte/scheduler pullPolicy: IfNotPresent - tag: 0.36.3-alpha + tag: 0.36.4-alpha ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod ## @@ -440,7 +440,7 @@ server: image: repository: airbyte/server pullPolicy: IfNotPresent - tag: 0.36.3-alpha + tag: 0.36.4-alpha ## @param server.podAnnotations [object] Add extra annotations to the server pod ## @@ -581,7 +581,7 @@ worker: image: repository: airbyte/worker pullPolicy: IfNotPresent - tag: 0.36.3-alpha + tag: 0.36.4-alpha ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s) ## @@ -699,7 +699,7 @@ bootloader: image: repository: airbyte/bootloader pullPolicy: IfNotPresent - tag: 0.36.3-alpha + tag: 0.36.4-alpha ## @param bootloader.podAnnotations [object] Add extra annotations to the bootloader pod ## diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md index e58aa63432b017..585a53655ed3e4 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/operator-guides/upgrading-airbyte.md @@ -103,7 +103,7 @@ If you are upgrading from (i.e. your current version of Airbyte is) Airbyte vers Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`. 
```bash - docker run --rm -v /tmp:/config airbyte/migration:0.36.3-alpha --\ + docker run --rm -v /tmp:/config airbyte/migration:0.36.4-alpha --\ --input /config/airbyte_archive.tar.gz\ --output /config/airbyte_archive_migrated.tar.gz ``` diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env index 75b1cfdbfa75e8..2c4fabdad59105 100644 --- a/kube/overlays/stable-with-resource-limits/.env +++ b/kube/overlays/stable-with-resource-limits/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.36.3-alpha +AIRBYTE_VERSION=0.36.4-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml index 9de37822186a32..4a40316bec6e56 100644 --- a/kube/overlays/stable-with-resource-limits/kustomization.yaml +++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/bootloader - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/scheduler - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/server - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/webapp - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/worker - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env index 3b82b057729cfc..89bb305385fad1 100644 --- a/kube/overlays/stable/.env +++ b/kube/overlays/stable/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.36.3-alpha +AIRBYTE_VERSION=0.36.4-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml index 277f73114c8f5a..336bdd4f2f75af 100644 --- a/kube/overlays/stable/kustomization.yaml +++ b/kube/overlays/stable/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/bootloader - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/scheduler - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/server - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/webapp - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/worker - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/octavia-cli/Dockerfile b/octavia-cli/Dockerfile index 0172db4bf5b4f1..adba617012e543 100644 --- a/octavia-cli/Dockerfile +++ b/octavia-cli/Dockerfile @@ -14,5 +14,5 @@ USER octavia-cli WORKDIR /home/octavia-project ENTRYPOINT ["octavia"] -LABEL io.airbyte.version=0.36.3-alpha +LABEL io.airbyte.version=0.36.4-alpha LABEL io.airbyte.name=airbyte/octavia-cli diff --git a/octavia-cli/README.md b/octavia-cli/README.md index f1da22e32d3c48..444e39503b3e8a 100644 --- a/octavia-cli/README.md +++ b/octavia-cli/README.md @@ -105,7 +105,7 @@ This script: ```bash touch ~/.octavia # Create a file to store env variables that will be mapped the octavia-cli container mkdir my_octavia_project_directory # Create your octavia project directory where YAML configurations will be stored. 
-docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.36.3-alpha +docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.36.4-alpha ``` ### Using `docker-compose` diff --git a/octavia-cli/install.sh b/octavia-cli/install.sh index 4d6e1d2d16598a..5f3802e7e04658 100755 --- a/octavia-cli/install.sh +++ b/octavia-cli/install.sh @@ -3,7 +3,7 @@ # This install scripts currently only works for ZSH and Bash profiles. # It creates an octavia alias in your profile bound to a docker run command and your current user. -VERSION=0.36.3-alpha +VERSION=0.36.4-alpha OCTAVIA_ENV_FILE=${HOME}/.octavia detect_profile() { diff --git a/octavia-cli/setup.py b/octavia-cli/setup.py index 5e528cae231e8c..8c3418a48b36e4 100644 --- a/octavia-cli/setup.py +++ b/octavia-cli/setup.py @@ -15,7 +15,7 @@ setup( name="octavia-cli", - version="0.36.3", + version="0.36.4", description="A command line interface to manage Airbyte configurations", long_description=README, author="Airbyte", From 3a032d9276dc2021fecca5717caa61fc068b368f Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Thu, 28 Apr 2022 09:08:16 +0300 Subject: [PATCH 014/152] #10938 source GA to Beta: upd window_in_days description (#12385) --- .../source_google_analytics_v4/spec.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/source_google_analytics_v4/spec.json b/airbyte-integrations/connectors/source-google-analytics-v4/source_google_analytics_v4/spec.json index 871aef715e4f16..07482cb89535b7 100644 --- a/airbyte-integrations/connectors/source-google-analytics-v4/source_google_analytics_v4/spec.json +++ b/airbyte-integrations/connectors/source-google-analytics-v4/source_google_analytics_v4/spec.json @@ -22,8 +22,8 @@ }, "window_in_days": { "type": "integer", - "title": "Window in days (Optional)", - "description": "The amount of days each stream slice would consist of beginning from start_date. Bigger the value - faster the fetch. (Min=1, as for a Day; Max=364, as for a Year).", + "title": "Data request window (Optional)", + "description": "The amount of data batched by the number of days. The bigger the value, the bigger the batch size and the lower the API requests made. 
(Min=1, as for a Day; Max=364, as for a Year).", "examples": [30, 60, 90, 120, 200, 364], "default": 1 }, From 42a58b0273d7010fa0296fbd4410a82e5fe5802f Mon Sep 17 00:00:00 2001 From: Serhii Chvaliuk Date: Thu, 28 Apr 2022 09:36:08 +0300 Subject: [PATCH 015/152] Source Amazon Ads: Update fields in source-connectors specifications (#11730) * Update fields in source-connectors specifications Signed-off-by: Sergey Chvalyuk --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 73 ++++++++++--------- .../connectors/source-amazon-ads/Dockerfile | 2 +- .../integration_tests/spec.json | 62 ++++++++-------- .../source_amazon_ads/source.py | 2 +- .../source_amazon_ads/spec.py | 73 ++++++++++--------- .../streams/report_streams/products_report.py | 2 +- docs/integrations/sources/amazon-ads.md | 6 +- 8 files changed, 115 insertions(+), 107 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index d642115f0f67c4..21604d505e03b5 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -15,7 +15,7 @@ - name: Amazon Ads sourceDefinitionId: c6b0a29e-1da9-4512-9002-7bfd0cba2246 dockerRepository: airbyte/source-amazon-ads - dockerImageTag: 0.1.6 + dockerImageTag: 0.1.7 documentationUrl: https://docs.airbyte.io/integrations/sources/amazon-ads icon: amazonads.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index f3e3f72075baae..f1c4be36edefa1 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -87,9 +87,9 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-amazon-ads:0.1.6" +- dockerImage: "airbyte/source-amazon-ads:0.1.7" spec: - documentationUrl: "https://docs.airbyte.io/integrations/sources/amazon-ads" + documentationUrl: "https://docs.airbyte.com/integrations/sources/amazon-ads" connectionSpecification: title: "Amazon Ads Spec" type: "object" @@ -100,72 +100,73 @@ order: 0 type: "string" client_id: - title: "Client Id" - description: "Oauth client id How to create your Login with Amazon" - name: "Client ID" + title: "Client ID" + description: "The Client ID of your Amazon Ads developer application." + order: 1 type: "string" client_secret: title: "Client Secret" - description: "Oauth client secret How to create your Login with Amazon" - name: "Client secret" + description: "The Client Secret of your Amazon Ads developer application." airbyte_secret: true + order: 2 type: "string" refresh_token: title: "Refresh Token" - description: "Oauth 2.0 refresh_token, read details here" - name: "Oauth refresh token" + description: "Amazon Ads Refresh Token. See the docs for more information on how to obtain this token." airbyte_secret: true - type: "string" - start_date: - title: "Start Date" - description: "Start date for collectiong reports, should not be more than\ - \ 60 days in past. In YYYY-MM-DD format" - name: "Start date" - examples: - - "2022-10-10" - - "2022-10-22" + order: 3 type: "string" region: - description: "An enumeration." - default: "NA" - name: "Region" - title: "AmazonAdsRegion" + title: "Region *" + description: "Region to pull data from (EU/NA/FE/SANDBOX). 
See docs for more details." enum: - "NA" - "EU" - "FE" - "SANDBOX" type: "string" - profiles: - title: "Profiles" - description: "profile Ids you want to fetch data for" - name: "Profile Ids" - type: "array" - items: - type: "integer" + default: "NA" + order: 4 report_wait_timeout: - title: "Report Wait Timeout" + title: "Report Wait Timeout *" description: "Timeout duration in minutes for Reports. Eg. 30" default: 30 - name: "Report Wait Timeout" examples: - 30 - 120 + order: 5 type: "integer" report_generation_max_retries: - title: "Report Generation Max Retries" + title: "Report Generation Maximum Retries *" description: "Maximum retries Airbyte will attempt for fetching Report Data.\ \ Eg. 5" default: 5 - name: "Report Geration Maximum Retries" examples: - 5 - 10 - 15 + order: 6 type: "integer" + start_date: + title: "Start Date (Optional)" + description: "The Start date for collecting reports, should not be more\ + \ than 60 days in the past. In YYYY-MM-DD format" + examples: + - "2022-10-10" + - "2022-10-22" + order: 7 + type: "string" + profiles: + title: "Profile IDs (Optional)" + description: "Profile IDs you want to fetch data for. See docs for more details." + order: 8 + type: "array" + items: + type: "integer" required: - "client_id" - "client_secret" diff --git a/airbyte-integrations/connectors/source-amazon-ads/Dockerfile b/airbyte-integrations/connectors/source-amazon-ads/Dockerfile index bb4f4e6cb7b263..c2b37d453265dd 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/Dockerfile +++ b/airbyte-integrations/connectors/source-amazon-ads/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.6 +LABEL io.airbyte.version=0.1.7 LABEL io.airbyte.name=airbyte/source-amazon-ads diff --git a/airbyte-integrations/connectors/source-amazon-ads/integration_tests/spec.json b/airbyte-integrations/connectors/source-amazon-ads/integration_tests/spec.json index 18ee31dc98a911..1eb16cca0e35cf 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-amazon-ads/integration_tests/spec.json @@ -1,5 +1,5 @@ { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/amazon-ads", + "documentationUrl": "https://docs.airbyte.com/integrations/sources/amazon-ads", "connectionSpecification": { "title": "Amazon Ads Spec", "type": "object", @@ -12,64 +12,64 @@ "type": "string" }, "client_id": { - "title": "Client Id", - "description": "Oauth client id How to create your Login with Amazon", - "name": "Client ID", + "title": "Client ID", + "description": "The Client ID of your Amazon Ads developer application.", + "order": 1, "type": "string" }, "client_secret": { "title": "Client Secret", - "description": "Oauth client secret How to create your Login with Amazon", - "name": "Client secret", + "description": "The Client Secret of your Amazon Ads developer application.", "airbyte_secret": true, + "order": 2, "type": "string" }, "refresh_token": { "title": "Refresh Token", - "description": "Oauth 2.0 refresh_token, read details here", - "name": "Oauth refresh token", + "description": "Amazon Ads Refresh Token. See the docs for more information on how to obtain this token.", "airbyte_secret": true, - "type": "string" - }, - "start_date": { - "title": "Start Date", - "description": "Start date for collectiong reports, should not be more than 60 days in past. 
In YYYY-MM-DD format", - "name": "Start date", - "examples": ["2022-10-10", "2022-10-22"], + "order": 3, "type": "string" }, "region": { - "description": "An enumeration.", + "title": "Region *", + "description": "Region to pull data from (EU/NA/FE/SANDBOX). See docs for more details.", "default": "NA", - "name": "Region", - "title": "AmazonAdsRegion", "enum": ["NA", "EU", "FE", "SANDBOX"], + "order": 4, "type": "string" }, - "profiles": { - "title": "Profiles", - "description": "profile Ids you want to fetch data for", - "name": "Profile Ids", - "type": "array", - "items": { - "type": "integer" - } - }, "report_wait_timeout": { - "title": "Report Wait Timeout", + "title": "Report Wait Timeout *", "description": "Timeout duration in minutes for Reports. Eg. 30", "default": 30, - "name": "Report Wait Timeout", "examples": [30, 120], + "order": 5, "type": "integer" }, "report_generation_max_retries": { - "title": "Report Generation Max Retries", + "title": "Report Generation Maximum Retries *", "description": "Maximum retries Airbyte will attempt for fetching Report Data. Eg. 5", "default": 5, - "name": "Report Geration Maximum Retries", "examples": [5, 10, 15], + "order": 6, "type": "integer" + }, + "start_date": { + "title": "Start Date (Optional)", + "description": "The Start date for collecting reports, should not be more than 60 days in the past. In YYYY-MM-DD format", + "examples": ["2022-10-10", "2022-10-22"], + "order": 7, + "type": "string" + }, + "profiles": { + "title": "Profile IDs (Optional)", + "description": "Profile IDs you want to fetch data for. See docs for more details.", + "order": 8, + "type": "array", + "items": { + "type": "integer" + } } }, "required": ["client_id", "client_secret", "refresh_token"] diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/source.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/source.py index 03da13e1790796..1d836efff44720 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/source.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/source.py @@ -93,7 +93,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: def spec(self, *args) -> ConnectorSpecification: return ConnectorSpecification( - documentationUrl="https://docs.airbyte.io/integrations/sources/amazon-ads", + documentationUrl="https://docs.airbyte.com/integrations/sources/amazon-ads", connectionSpecification=AmazonAdsConfig.schema(), advanced_auth=advanced_auth, ) diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.py index abda177093fa44..7efc1db550dcf8 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.py @@ -5,6 +5,7 @@ from typing import List from airbyte_cdk.models import AdvancedAuth, AuthFlowType, OAuthConfigSpecification +from airbyte_cdk.sources.utils.schema_helpers import expand_refs from pydantic import BaseModel, Extra, Field from source_amazon_ads.constants import AmazonAdsRegion @@ -21,67 +22,71 @@ class Config: auth_type: str = Field(default="oauth2.0", const=True, order=0) client_id: str = Field( - name="Client ID", - description=( - 'Oauth client id ' - "How to create your Login with Amazon" - ), + title="Client ID", + description="The Client ID of your Amazon Ads developer application.", + order=1, ) + client_secret: str = 
Field( - name="Client secret", - description=( - 'Oauth client secret ' - "How to create your Login with Amazon" - ), + title="Client Secret", + description="The Client Secret of your Amazon Ads developer application.", airbyte_secret=True, + order=2, ) refresh_token: str = Field( - name="Oauth refresh token", - description=( - 'Oauth 2.0 refresh_token, ' - "read details here" - ), + title="Refresh Token", + description='Amazon Ads Refresh Token. See the docs for more information on how to obtain this token.', airbyte_secret=True, + order=3, ) - start_date: str = Field( - None, - name="Start date", - description="Start date for collectiong reports, should not be more than 60 days in past. In YYYY-MM-DD format", - examples=["2022-10-10", "2022-10-22"], - ) - - region: AmazonAdsRegion = Field(name="Region", description="Region to pull data from (EU/NA/FE/SANDBOX)", default=AmazonAdsRegion.NA) - - profiles: List[int] = Field( - None, - name="Profile Ids", - description="profile Ids you want to fetch data for", + region: AmazonAdsRegion = Field( + title="Region *", + description='Region to pull data from (EU/NA/FE/SANDBOX). See docs for more details.', + default=AmazonAdsRegion.NA, + order=4, ) report_wait_timeout: int = Field( - name="Report Wait Timeout", + title="Report Wait Timeout *", description="Timeout duration in minutes for Reports. Eg. 30", default=30, examples=[30, 120], + order=5, ) report_generation_max_retries: int = Field( - name="Report Geration Maximum Retries", + title="Report Generation Maximum Retries *", description="Maximum retries Airbyte will attempt for fetching Report Data. Eg. 5", default=5, examples=[5, 10, 15], + order=6, + ) + + start_date: str = Field( + None, + title="Start Date (Optional)", + description="The Start date for collecting reports, should not be more than 60 days in the past. In YYYY-MM-DD format", + examples=["2022-10-10", "2022-10-22"], + order=7, + ) + + profiles: List[int] = Field( + None, + title="Profile IDs (Optional)", + description='Profile IDs you want to fetch data for. 
See docs for more details.', + order=8, ) @classmethod def schema(cls, **kwargs): schema = super().schema(**kwargs) + expand_refs(schema) # Transform pydantic generated enum for region - definitions = schema.pop("definitions", None) - if definitions: - schema["properties"]["region"].update(definitions["AmazonAdsRegion"]) - schema["properties"]["region"].pop("allOf", None) + if schema["properties"]["region"].get("allOf"): + schema["properties"]["region"] = {**schema["properties"]["region"]["allOf"][0], **schema["properties"]["region"]} + schema["properties"]["region"].pop("allOf") return schema diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/products_report.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/products_report.py index 136d3d14ce82dc..8da6b2037a9756 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/products_report.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/products_report.py @@ -266,7 +266,7 @@ def _get_init_report_body(self, report_date: str, record_type: str, profile): metrics_list = copy(metrics_list) metrics_list.remove("sku") -# adId is automatically added to the report by amazon and requesting adId causes an amazon error + # adId is automatically added to the report by amazon and requesting adId causes an amazon error if "adId" in metrics_list: metrics_list.remove("adId") diff --git a/docs/integrations/sources/amazon-ads.md b/docs/integrations/sources/amazon-ads.md index 766b6946fa05e6..65af43664364a3 100644 --- a/docs/integrations/sources/amazon-ads.md +++ b/docs/integrations/sources/amazon-ads.md @@ -59,10 +59,11 @@ Information about expected report generation waiting time you may find [here](ht * client\_id * client\_secret * refresh\_token -* scope -* profiles * region +* report\_wait\_timeout +* report\_generation\_max\_retries * start\_date \(optional\) +* profiles \(optional\) More how to get client\_id and client\_secret you can find on [AWS docs](https://advertising.amazon.com/API/docs/en-us/setting-up/step-1-create-lwa-app). @@ -76,6 +77,7 @@ Start date used for generating reports starting from the specified start date. 
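Stepping back to the `schema()` override in `spec.py` above: pydantic renders enum-typed fields such as `region` as an `allOf` wrapper around a shared definition, and the override flattens that wrapper (after `expand_refs`) so the connector spec exposes a plain `enum` property. A minimal, self-contained sketch of that transformation — the input dict below is a hand-written stand-in for pydantic's output, not the real `AmazonAdsConfig` spec:

```python
# Sketch of the allOf-flattening done in AmazonAdsConfig.schema().
# "region_property" is an illustrative stand-in for what pydantic generates
# for an enum-typed field once $refs are expanded.
region_property = {
    "title": "Region *",
    "default": "NA",
    "allOf": [{"enum": ["NA", "EU", "FE", "SANDBOX"], "type": "string"}],
}

if region_property.get("allOf"):
    # Merge the wrapped enum definition into the property itself; keys already
    # set on the property (title, default) win over the allOf entry.
    region_property = {**region_property["allOf"][0], **region_property}
    region_property.pop("allOf")

print(region_property)
# {'enum': ['NA', 'EU', 'FE', 'SANDBOX'], 'type': 'string', 'title': 'Region *', 'default': 'NA'}
```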
S | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| `0.1.7` | 2022-04-27 | [\#11730](https://github.com/airbytehq/airbyte/pull/11730) | Update fields in source-connectors specifications | | `0.1.6` | 2022-04-20 | [\#11659](https://github.com/airbytehq/airbyte/pull/11659) | Add adId to products report | | `0.1.5` | 2022-04-08 | [\#11430](https://github.com/airbytehq/airbyte/pull/11430) | `Added support OAuth2.0` | | `0.1.4` | 2022-02-21 | [\#10513](https://github.com/airbytehq/airbyte/pull/10513) | `Increasing REPORT_WAIT_TIMEOUT for supporting report generation which takes longer time ` | From fd4b71e25f0cd39f7f1a2330ff5d7f3ba93020dc Mon Sep 17 00:00:00 2001 From: Baz Date: Thu, 28 Apr 2022 14:57:50 +0300 Subject: [PATCH 016/152] =?UTF-8?q?=F0=9F=90=9B=20=20Source=20Amplitude:?= =?UTF-8?q?=20add=20error=20descriptions=20and=20fix=20`events`=20stream?= =?UTF-8?q?=20fail=20on=20404=20(#12430)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 2 +- .../connectors/source-amplitude/Dockerfile | 2 +- .../source-amplitude/source_amplitude/api.py | 71 ++++++++++++------- .../source_amplitude/errors.py | 34 +++++++++ .../source-amplitude/unit_tests/unit_test.py | 21 ++++-- docs/integrations/sources/amplitude.md | 1 + 7 files changed, 102 insertions(+), 31 deletions(-) create mode 100644 airbyte-integrations/connectors/source-amplitude/source_amplitude/errors.py diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 21604d505e03b5..d9d89726f44072 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -35,7 +35,7 @@ - name: Amplitude sourceDefinitionId: fa9f58c6-2d03-4237-aaa4-07d75e0c1396 dockerRepository: airbyte/source-amplitude - dockerImageTag: 0.1.4 + dockerImageTag: 0.1.5 documentationUrl: https://docs.airbyte.io/integrations/sources/amplitude icon: amplitude.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index f1c4be36edefa1..e18e005985dcdf 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -476,7 +476,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-amplitude:0.1.4" +- dockerImage: "airbyte/source-amplitude:0.1.5" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/amplitude" connectionSpecification: diff --git a/airbyte-integrations/connectors/source-amplitude/Dockerfile b/airbyte-integrations/connectors/source-amplitude/Dockerfile index 210e952009ec75..f2cad066bbc678 100644 --- a/airbyte-integrations/connectors/source-amplitude/Dockerfile +++ b/airbyte-integrations/connectors/source-amplitude/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . 
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.4 +LABEL io.airbyte.version=0.1.5 LABEL io.airbyte.name=airbyte/source-amplitude diff --git a/airbyte-integrations/connectors/source-amplitude/source_amplitude/api.py b/airbyte-integrations/connectors/source-amplitude/source_amplitude/api.py index f5e3e68a3f229c..0d2908608a5e35 100644 --- a/airbyte-integrations/connectors/source-amplitude/source_amplitude/api.py +++ b/airbyte-integrations/connectors/source-amplitude/source_amplitude/api.py @@ -17,6 +17,8 @@ from airbyte_cdk.models import SyncMode from airbyte_cdk.sources.streams.http import HttpStream +from .errors import HTTP_ERROR_CODES, error_msg_from_status + class AmplitudeStream(HttpStream, ABC): @@ -27,8 +29,12 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, return None def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_data = response.json() - yield from response_data.get(self.name, []) + status = response.status_code + if status in HTTP_ERROR_CODES.keys(): + error_msg_from_status(status) + yield from [] + else: + yield from response.json().get(self.data_field, []) def path(self, **kwargs) -> str: return f"{self.api_version}/{self.name}" @@ -37,14 +43,12 @@ def path(self, **kwargs) -> str: class Cohorts(AmplitudeStream): primary_key = "id" api_version = 3 + data_field = "cohorts" class Annotations(AmplitudeStream): primary_key = "id" - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_data = response.json() - yield from response_data.get("data", []) + data_field = "data" class IncrementalAmplitudeStream(AmplitudeStream, ABC): @@ -124,6 +128,22 @@ def _parse_zip_file(self, zip_file: IO[bytes]) -> Iterable[Mapping]: for record in file: yield json.loads(record) + def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: + slices = [] + start = self._start_date + if stream_state: + start = pendulum.parse(stream_state.get(self.cursor_field)) + end = pendulum.now() + while start <= end: + slices.append( + { + "start": start.strftime(self.date_template), + "end": self._get_end_date(start).strftime(self.date_template), + } + ) + start = start.add(**self.time_interval) + return slices + def read_records( self, sync_mode: SyncMode, @@ -132,34 +152,35 @@ def read_records( stream_state: Mapping[str, Any] = None, ) -> Iterable[Mapping[str, Any]]: stream_state = stream_state or {} - params = self.request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=None) - # API returns data only when requested with a difference between 'start' and 'end' of 6 or more hours. - if pendulum.parse(params["start"]).add(hours=6) > pendulum.parse(params["end"]): - return [] + start = pendulum.parse(stream_slice["start"]).add(hours=6) + end = pendulum.parse(stream_slice["end"]) + if start > end: + yield from [] + # sometimes the API throws a 404 error for not obvious reasons, we have to handle it and log it. 
# for example, if there is no data from the specified time period, a 404 exception is thrown # https://developers.amplitude.com/docs/export-api#status-codes + try: + self.logger.info(f"Fetching {self.name} time range: {start.strftime('%Y-%m-%d')} - {end.strftime('%Y-%m-%d')}") yield from super().read_records(sync_mode, cursor_field, stream_slice, stream_state) except requests.exceptions.HTTPError as error: - if error.response.status_code == 404: - self.logger.warn(f"Error during syncing {self.name} stream - {error}") - return [] + status = error.response.status_code + if status in HTTP_ERROR_CODES.keys(): + error_msg_from_status(status) + yield from [] else: + self.logger.error(f"Error during syncing {self.name} stream - {error}") raise - def request_params( - self, stream_state: Mapping[str, Any], next_page_token: Mapping[str, Any] = None, **kwargs - ) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state, next_page_token=next_page_token, **kwargs) - if stream_state or next_page_token: - params["start"] = pendulum.parse(params["start"]).add(hours=1).strftime(self.date_template) + def request_params(self, stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: + params = self.base_params + params["start"] = pendulum.parse(stream_slice["start"]).strftime(self.date_template) + params["end"] = pendulum.parse(stream_slice["end"]).strftime(self.date_template) return params - def path( - self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> str: + def path(self, **kwargs) -> str: return f"{self.api_version}/export" @@ -168,9 +189,10 @@ class ActiveUsers(IncrementalAmplitudeStream): name = "active_users" primary_key = "date" time_interval = {"months": 1} + data_field = "data" def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_data = response.json().get("data", []) + response_data = response.json().get(self.data_field, []) if response_data: series = list(map(list, zip(*response_data["series"]))) for i, date in enumerate(response_data["xValues"]): @@ -184,9 +206,10 @@ class AverageSessionLength(IncrementalAmplitudeStream): name = "average_session_length" primary_key = "date" time_interval = {"days": 15} + data_field = "data" def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_data = response.json().get("data", []) + response_data = response.json().get(self.data_field, []) if response_data: # From the Amplitude documentation it follows that "series" is an array with one element which is itself # an array that contains the average session length for each day. diff --git a/airbyte-integrations/connectors/source-amplitude/source_amplitude/errors.py b/airbyte-integrations/connectors/source-amplitude/source_amplitude/errors.py new file mode 100644 index 00000000000000..037dd7f6401bdd --- /dev/null +++ b/airbyte-integrations/connectors/source-amplitude/source_amplitude/errors.py @@ -0,0 +1,34 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + +import logging + +LOGGER = logging.getLogger("airbyte") + +HTTP_ERROR_CODES = { + 400: { + "msg": "The file size of the exported data is too large. Shorten the time ranges and try again. The limit size is 4GB.", + "lvl": "ERROR", + }, + 404: { + "msg": "No data collected", + "lvl": "WARN", + }, + 504: { + "msg": "The amount of data is large causing a timeout. 
For large amounts of data, the Amazon S3 destination is recommended.",
+        "lvl": "ERROR",
+    },
+}
+
+
+def error_msg_from_status(status: int = None):
+    if status:
+        level = HTTP_ERROR_CODES[status]["lvl"]
+        message = HTTP_ERROR_CODES[status]["msg"]
+        if level == "ERROR":
+            LOGGER.error(message)
+        elif level == "WARN":
+            LOGGER.warn(message)
+    else:
+        LOGGER.error(f"Unknown error occurred: code {status}")
diff --git a/airbyte-integrations/connectors/source-amplitude/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-amplitude/unit_tests/unit_test.py
index 8eb316ca38e35e..f691390f40ec15 100755
--- a/airbyte-integrations/connectors/source-amplitude/unit_tests/unit_test.py
+++ b/airbyte-integrations/connectors/source-amplitude/unit_tests/unit_test.py
@@ -2,9 +2,9 @@
 # Copyright (c) 2021 Airbyte, Inc., all rights reserved.
 #
 
-import airbyte_cdk.models
 import pytest
 import requests
+from airbyte_cdk.models import SyncMode
 from source_amplitude.api import Events
 
 
@@ -13,16 +13,29 @@ def __init__(self, status_code):
         self.status_code = status_code
 
 
-def test_http_error_handler(mocker):
+def test_incremental_http_error_handler(mocker):
     stream = Events(start_date="2021-01-01T00:00:00Z")
+    stream_slice = stream.stream_slices()[0]
     mock_response = MockRequest(404)
     send_request_mocker = mocker.patch.object(stream, "_send_request", side_effect=requests.HTTPError(**{"response": mock_response}))
     with pytest.raises(StopIteration):
-        result = next(stream.read_records(sync_mode=airbyte_cdk.models.SyncMode.full_refresh))
+        result = next(stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice))
     assert result == []
 
     mock_response = MockRequest(403)
     send_request_mocker.side_effect = requests.HTTPError(**{"response": mock_response})
     with pytest.raises(requests.exceptions.HTTPError):
-        next(stream.read_records(sync_mode=airbyte_cdk.models.SyncMode.full_refresh))
+        next(stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice))
+
+    mock_response = MockRequest(400)
+    send_request_mocker.side_effect = requests.HTTPError(**{"response": mock_response})
+    with pytest.raises(StopIteration):
+        result = next(stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice))
+    assert result == []
+
+    mock_response = MockRequest(504)
+    send_request_mocker.side_effect = requests.HTTPError(**{"response": mock_response})
+    with pytest.raises(StopIteration):
+        result = next(stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice))
+    assert result == []
diff --git a/docs/integrations/sources/amplitude.md b/docs/integrations/sources/amplitude.md
index 8039e147b80e8d..93384fd00613d2 100644
--- a/docs/integrations/sources/amplitude.md
+++ b/docs/integrations/sources/amplitude.md
@@ -45,6 +45,7 @@ Please read [How to get your API key and Secret key](https://help.amplitude.com/
 
 | Version | Date | Pull Request | Subject |
 | :------ | :--------- | :----------------------------------------------------- | :------ |
+| 0.1.5 | 2022-04-28 | [12430](https://github.com/airbytehq/airbyte/pull/12430) | Added HTTP error descriptions and fixed `Events` stream fail caused by `404` HTTP Error |
 | 0.1.4 | 2021-12-23 | [8434](https://github.com/airbytehq/airbyte/pull/8434) | Update fields in source-connectors specifications |
 | 0.1.3 | 2021-10-12 | [6375](https://github.com/airbytehq/airbyte/pull/6375) | Log Transient 404 Error in Events stream |
 | 0.1.2 | 2021-09-21 | [6353](https://github.com/airbytehq/airbyte/pull/6353) | Correct output schemas on cohorts, 
events, active\_users, and average\_session\_lengths streams | From 87beaf52603c06773def0c4ab33f8919f7562eac Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Thu, 28 Apr 2022 16:04:27 +0300 Subject: [PATCH 017/152] Source Smartsheets: fix docs to certify for Beta (#12203) * #5520 fix scrambled columns bug * #5520 source smartsheets: add changelog item * #5520 move pytest to optional setup requirements * #12003 source smartsheets: implement incremental read + tests * #12003 source smartsheet: add changelog * #12003 source smartsheets: fix merge conflict on unit tests * #12003 source smartsheets: fix startdate in spec * #11759 source smartsheets: fix doc to certify for Beta --- .../connectors/source-smartsheets/Dockerfile | 2 +- .../source_smartsheets/spec.json | 9 +- docs/integrations/sources/smartsheets.md | 115 +++++++++--------- 3 files changed, 65 insertions(+), 61 deletions(-) diff --git a/airbyte-integrations/connectors/source-smartsheets/Dockerfile b/airbyte-integrations/connectors/source-smartsheets/Dockerfile index cb26f971e9dad1..8a86cdfb6699c7 100644 --- a/airbyte-integrations/connectors/source-smartsheets/Dockerfile +++ b/airbyte-integrations/connectors/source-smartsheets/Dockerfile @@ -14,5 +14,5 @@ COPY $CODE_PATH ./$CODE_PATH ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.10 +LABEL io.airbyte.version=0.1.11 LABEL io.airbyte.name=airbyte/source-smartsheets diff --git a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json index 93c5d422ea2366..5e027cac477e95 100644 --- a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json +++ b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json @@ -11,20 +11,23 @@ "title": "Access Token", "description": "The Access Token for making authenticated requests. Find in the main menu: Account > Apps & Integrations > API Access", "type": "string", + "order": 0, "airbyte_secret": true }, "spreadsheet_id": { "title": "Sheet ID", "description": "The spreadsheet ID. Find in the spreadsheet menu: File > Properties", - "type": "string" + "type": "string", + "order": 1 }, "start_datetime": { - "title": "Start Datetime", + "title": "Start Datetime (Optional)", "type": "string", "examples": ["2000-01-01T13:00:00", "2000-01-01T13:00:00-07:00"], "description": "ISO 8601, for instance: `YYYY-MM-DDTHH:MM:SS`, `YYYY-MM-DDTHH:MM:SS+HH:MM`", "format": "date-time", - "default": "2020-01-01T00:00:00+00:00" + "default": "2020-01-01T00:00:00+00:00", + "order": 2 } } }, diff --git a/docs/integrations/sources/smartsheets.md b/docs/integrations/sources/smartsheets.md index 595e348d673d21..f06e8a20df8afa 100644 --- a/docs/integrations/sources/smartsheets.md +++ b/docs/integrations/sources/smartsheets.md @@ -1,93 +1,94 @@ # Smartsheets -### Table of Contents +This page guides you through the process of setting up the Smartsheets source connector. 
-* [Sync Details](smartsheets.md#sync-details) - * [Column datatype mapping](smartsheets.md#column-datatype-mapping) - * [Features](smartsheets.md#Features) - * [Performance Considerations](smartsheets.md#performance-considerations) -* [Getting Started](smartsheets.md#getting-started) - * [Requirements](smartsheets.md#requirements) - * [Setup Guide](smartsheets.md#setup-guide) - * [Configuring the source in the Airbyte UI](smartsheets.md#configuring-the-source-in-the-airbyte-ui) +## Prerequisites -## Sync Details - -The Smartsheet Source is written to pull data from a single Smartsheet spreadsheet. Unlike Google Sheets, Smartsheets only allows one sheet per Smartsheet - so a given Airbyte connector instance can sync only one sheet at a time. - -To replicate multiple spreadsheets, you can create multiple instances of the Smartsheet Source in Airbyte, reusing the API token for all your sheets that you need to sync. - -**Note: Column headers must contain only alphanumeric characters or `_` , as specified in the** [**Airbyte Protocol**](../../understanding-airbyte/airbyte-specification.md). +To configure the Smartsheet Source for syncs, you'll need the following: -### Column datatype mapping +* A Smartsheets API access token - generated by a Smartsheets user with at least **read** access +* The ID of the spreadsheet you'd like to sync -The data type mapping adopted by this connector is based on the Smartsheet [documentation](https://smartsheet.redoc.ly/tag/columnsRelated#section/Column-Types). +## Step 1: Set up Smartsheets -**NOTE**: For any column datatypes interpreted by Smartsheets beside `DATE` and `DATETIME`, this connector's source schema generation assumes a `string` type, in which case the `format` field is not required by Airbyte. +### Obtain a Smartsheets API access token -| Integration Type | Airbyte Type | Airbyte Format | -|:-----------------|:-------------|:---------------------| -| `TEXT_NUMBER` | `string` | | -| `DATE` | `string` | `format: date` | -| `DATETIME` | `string` | `format: date-time` | -| `anything else` | `string` | | +You can generate an API key for your account from a session of your Smartsheet webapp by clicking: -The remaining column datatypes supported by Smartsheets are more complex types \(e.g. Predecessor, Dropdown List\) and are not supported by this connector beyond its `string` representation. +* Account (top-right icon) +* Apps & Integrations +* API Access +* Generate new access token -### Features +For questions on advanced authorization flows, refer to [this](https://www.smartsheet.com/content-center/best-practices/tips-tricks/api-getting-started). -This source connector only supports Full Refresh Sync. Since Smartsheets only allows 5000 rows per sheet, it's likely that the Full Refresh Sync Mode will suit the majority of use-cases. +### Prepare the spreadsheet ID of your Smartsheet -| Feature | Supported? | -|:------------------|:-----------| -| Full Refresh Sync | Yes | -| Incremental Sync | No | -| Namespaces | No | +You'll also need the ID of the Spreadsheet you'd like to sync. Unlike Google Sheets, this ID is not found in the URL. You can find the required spreadsheet ID from your Smartsheet app session by going to: -### Performance considerations +* File +* Properties -At the time of writing, the [Smartsheets API rate limit](https://developers.smartsheet.com/blog/smartsheet-api-best-practices#:~:text=The%20Smartsheet%20API%20currently%20imposes,per%20minute%20per%20Access%20Token.) is 300 requests per minute per API access token. 
This connector makes 6 API calls per sync operation. 
 
-## Getting started
+## Step 2: Set up the Smartsheets connector in Airbyte
 
-### Requirements
+**For Airbyte Cloud:**
 
-To configure the Smartsheet Source for syncs, you'll need the following:
+1. [Log into your Airbyte Cloud](https://cloud.airbyte.io/workspaces) account.
+2. In the left navigation bar, click Sources. In the top-right corner, click **+new source**.
+3. On the Set up the source page, enter the name for the Smartsheets connector and select **Smartsheets** from the Source type dropdown.
+4. Authenticate via OAuth2.0 using the API access token from Prerequisites
+5. Enter the start date and the ID of the spreadsheet you want to sync
+6. Submit the form
 
-* A Smartsheets API access token - generated by a Smartsheets user with at least **read** access
-* The ID of the spreadsheet you'd like to sync
+**For Airbyte OSS:**
+1. Navigate to the Airbyte Open Source dashboard
+2. Set the name for your source
+3. Enter the API access token from Prerequisites
+4. Enter the ID of the spreadsheet you want to sync
+5. Enter a start sync date
+6. Click **Set up source**
 
-### Setup guide
+## Supported sync modes
 
-#### Obtain a Smartsheets API access token
+The Smartsheets source connector supports the following [sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-sync-modes):
+ - Full Refresh | Overwrite
+ - Full Refresh | Append
+ - Incremental | Append
+ - Incremental | Deduped
 
-You can generate an API key for your account from a session of your Smartsheet webapp by clicking:
+## Performance considerations
 
-* Account \(top-right icon\)
-* Apps & Integrations
-* API Access
-* Generate new access token
+At the time of writing, the [Smartsheets API rate limit](https://developers.smartsheet.com/blog/smartsheet-api-best-practices#be-practical-adhere-to-rate-limiting-guidelines) is 300 requests per minute per API access token.
 
-For questions on advanced authorization flows, refer to [this](https://www.smartsheet.com/content-center/best-practices/tips-tricks/api-getting-started).
+## Supported streams
 
-#### The spreadsheet ID of your Smartsheet
+This source provides a single stream per spreadsheet with a dynamic schema, depending on your spreadsheet structure.
+For example, having a spreadsheet `Customers`, the connector would introduce a stream with the same name and properties typed according to Data type map (see [below](https://docs.airbyte.com/integrations/sources/smartsheets/#data-type-map)).
 
-You'll also need the ID of the Spreadsheet you'd like to sync. Unlike Google Sheets, this ID is not found in the URL. You can find the required spreadsheet ID from your Smartsheet app session by going to:
+## Important highlights
+The Smartsheet Source is written to pull data from a single Smartsheet spreadsheet. Unlike Google Sheets, Smartsheets only allows one sheet per Smartsheet - so a given Airbyte connector instance can sync only one sheet at a time. To replicate multiple spreadsheets, you can create multiple instances of the Smartsheet Source in Airbyte, reusing the API token for all your sheets that you need to sync.
 
-* File
-* Properties
+**Note: Column headers must contain only alphanumeric characters or `_` , as specified in the** [**Airbyte Protocol**](../../understanding-airbyte/airbyte-specification.md).
 
+## Data type map
+The data type mapping adopted by this connector is based on the Smartsheet [documentation](https://smartsheet-platform.github.io/api-docs/index.html?python#column-types). 
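The table below summarizes the supported mappings. As a rough illustration of how such a column-type map can drive per-sheet schema generation, here is a hedged Python sketch — the names and structure are invented for the example, not the connector's actual implementation:

```python
# Illustrative only: deriving a JSON schema for a sheet from a column-type map
# like the one documented below. Function and variable names are hypothetical.
COLUMN_TYPE_TO_JSON_SCHEMA = {
    "TEXT_NUMBER": {"type": "string"},
    "DATE": {"type": "string", "format": "date"},
    "DATETIME": {"type": "string", "format": "date-time"},
}

def property_for(column_type: str) -> dict:
    # Any type other than DATE/DATETIME falls back to a plain string,
    # matching the "anything else -> string" rule in the table below.
    return COLUMN_TYPE_TO_JSON_SCHEMA.get(column_type, {"type": "string"})

columns = [("name", "TEXT_NUMBER"), ("signup_date", "DATE"), ("updated", "DATETIME")]
schema = {"type": "object", "properties": {n: property_for(t) for n, t in columns}}
print(schema)
```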
-### Configuring the source in the Airbyte UI +**NOTE**: For any column datatypes interpreted by Smartsheets beside `DATE` and `DATETIME`, this connector's source schema generation assumes a `string` type, in which case the `format` field is not required by Airbyte. -To setup your new Smartsheets source, Airbyte will need: +| Integration Type | Airbyte Type | Airbyte Format | +|:-----------------|:-------------|:---------------------| +| `TEXT_NUMBER` | `string` | | +| `DATE` | `string` | `format: date` | +| `DATETIME` | `string` | `format: date-time` | +| `anything else` | `string` | | -1. Your API access token -2. The spreadsheet ID +The remaining column datatypes supported by Smartsheets are more complex types (e.g. Predecessor, Dropdown List) and are not supported by this connector beyond its `string` representation. ## Changelog | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------| -| 0.1.10 | 2022-04-15 | [12077](https://github.com/airbytehq/airbyte/pull/12077) | Implement incremental read and improve code test coverage | +| 0.1.11 | 2022-04-27 | [12203](https://github.com/airbytehq/airbyte/pull/12203) | Doc improvements | +| 0.1.10 | 2022-04-15 | [12077](https://github.com/airbytehq/airbyte/pull/12077) | Implement incremental read and improve code test coverage | | 0.1.9 | 2022-04-12 | [11911](https://github.com/airbytehq/airbyte/pull/11911) | Bugfix: scrambled columns | | 0.1.8 | 2022-02-04 | [9792](https://github.com/airbytehq/airbyte/pull/9792) | Added oauth support | From e45d1fafc06b51686ea616ed0882c6375522c79f Mon Sep 17 00:00:00 2001 From: Ohcui <42260310+Ohcui@users.noreply.github.com> Date: Thu, 28 Apr 2022 21:35:19 +0800 Subject: [PATCH 018/152] OpenPI config: remove stale required key (#12341) --- airbyte-api/src/main/openapi/config.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/airbyte-api/src/main/openapi/config.yaml b/airbyte-api/src/main/openapi/config.yaml index 477d7ee688daad..c1fdc0044d4bc6 100644 --- a/airbyte-api/src/main/openapi/config.yaml +++ b/airbyte-api/src/main/openapi/config.yaml @@ -3143,7 +3143,6 @@ components: WebBackendConnectionCreate: type: object required: - - connection - sourceId - destinationId - status From 31485d7b8107c092d2d9fb75379b627b43a8599c Mon Sep 17 00:00:00 2001 From: Jagannath Saragadam Date: Thu, 28 Apr 2022 08:14:27 -0700 Subject: [PATCH 019/152] Source Google Ads: change *_labels stream type from int to string in schema (#12158) --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 2 +- .../connectors/source-google-ads/Dockerfile | 2 +- .../schemas/ad_group_ad_labels.json | 6 +- .../schemas/ad_group_labels.json | 6 +- .../schemas/campaign_labels.json | 4 +- docs/integrations/sources/google-ads.md | 76 ++++++++++--------- 7 files changed, 50 insertions(+), 48 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index d9d89726f44072..2f1f37afafa69b 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -272,7 +272,7 @@ - name: Google Ads sourceDefinitionId: 253487c0-2246-43ba-a21f-5116b20a2c50 dockerRepository: airbyte/source-google-ads - dockerImageTag: 0.1.35 + dockerImageTag: 0.1.36 documentationUrl: 
https://docs.airbyte.io/integrations/sources/google-ads icon: google-adwords.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index e18e005985dcdf..8830d25e2ba52f 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -2644,7 +2644,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-google-ads:0.1.35" +- dockerImage: "airbyte/source-google-ads:0.1.36" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/google-ads" connectionSpecification: diff --git a/airbyte-integrations/connectors/source-google-ads/Dockerfile b/airbyte-integrations/connectors/source-google-ads/Dockerfile index 5557152bd28b0d..5d34b4e8a4a4c9 100644 --- a/airbyte-integrations/connectors/source-google-ads/Dockerfile +++ b/airbyte-integrations/connectors/source-google-ads/Dockerfile @@ -13,5 +13,5 @@ RUN pip install . ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.35 +LABEL io.airbyte.version=0.1.36 LABEL io.airbyte.name=airbyte/source-google-ads diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/ad_group_ad_labels.json b/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/ad_group_ad_labels.json index 5338f22aac8ae4..50c0377ae5789a 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/ad_group_ad_labels.json +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/ad_group_ad_labels.json @@ -3,16 +3,16 @@ "type": "object", "properties": { "ad_group_ad.ad.resource_name": { - "type": ["null", "integer"] + "type": ["null", "string"] }, "ad_group_ad_label.resource_name": { "type": ["null", "string"] }, "label.name": { - "type": ["null", "integer"] + "type": ["null", "string"] }, "label.resource_name": { - "type": ["null", "integer"] + "type": ["null", "string"] } } } diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/ad_group_labels.json b/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/ad_group_labels.json index 226aa638419820..ad0fb593eeeb99 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/ad_group_labels.json +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/ad_group_labels.json @@ -3,16 +3,16 @@ "type": "object", "properties": { "ad_group.resource_name": { - "type": ["null", "integer"] + "type": ["null", "string"] }, "ad_group_label.resource_name": { "type": ["null", "string"] }, "label.name": { - "type": ["null", "integer"] + "type": ["null", "string"] }, "label.resource_name": { - "type": ["null", "integer"] + "type": ["null", "string"] } } } diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/campaign_labels.json b/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/campaign_labels.json index 8ecd42b3dd5e02..022d767958f9f2 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/campaign_labels.json +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/campaign_labels.json @@ -9,10 +9,10 @@ "type": ["null", "string"] }, "label.name": { - "type": ["null", "integer"] + "type": ["null", "string"] }, "label.resource_name": { 
- "type": ["null", "integer"] + "type": ["null", "string"] } } } diff --git a/docs/integrations/sources/google-ads.md b/docs/integrations/sources/google-ads.md index 68adfc021679e5..c6c2b1a38deafe 100644 --- a/docs/integrations/sources/google-ads.md +++ b/docs/integrations/sources/google-ads.md @@ -8,12 +8,12 @@ If you don't already have a developer token from Google Ads, make sure you follo ## Features -| Feature | Supported? | -| :--- | :--- | -| Full Refresh Sync | Yes | -| Incremental Sync | Yes | -| Replicate Incremental Deletes | No | -| SSL connection | Yes | +| Feature | Supported? | +| :---------------------------- | :--------- | +| Full Refresh Sync | Yes | +| Incremental Sync | Yes | +| Replicate Incremental Deletes | No | +| SSL connection | Yes | ## Supported Tables @@ -21,27 +21,27 @@ This source is capable of syncing the following tables and their data: ### Main Tables -* [accounts](https://developers.google.com/google-ads/api/fields/v8/customer) -* [ad\_group\_ads](https://developers.google.com/google-ads/api/fields/v8/ad_group_ad) -* [ad\_group\_ad\_labels](https://developers.google.com/google-ads/api/fields/v8/ad_group_ad_label) -* [ad\_groups](https://developers.google.com/google-ads/api/fields/v8/ad_group) -* [ad\_group\_labels](https://developers.google.com/google-ads/api/fields/v8/ad_group_label) -* [campaigns](https://developers.google.com/google-ads/api/fields/v8/campaign) -* [campaign\_labels](https://developers.google.com/google-ads/api/fields/v8/campaign_label) -* [click\_view](https://developers.google.com/google-ads/api/reference/rpc/v8/ClickView) -* [keyword](https://developers.google.com/google-ads/api/fields/v8/keyword_view) -* [geographic](https://developers.google.com/google-ads/api/fields/v8/geographic_view) +- [accounts](https://developers.google.com/google-ads/api/fields/v8/customer) +- [ad_group_ads](https://developers.google.com/google-ads/api/fields/v8/ad_group_ad) +- [ad_group_ad_labels](https://developers.google.com/google-ads/api/fields/v8/ad_group_ad_label) +- [ad_groups](https://developers.google.com/google-ads/api/fields/v8/ad_group) +- [ad_group_labels](https://developers.google.com/google-ads/api/fields/v8/ad_group_label) +- [campaigns](https://developers.google.com/google-ads/api/fields/v8/campaign) +- [campaign_labels](https://developers.google.com/google-ads/api/fields/v8/campaign_label) +- [click_view](https://developers.google.com/google-ads/api/reference/rpc/v8/ClickView) +- [keyword](https://developers.google.com/google-ads/api/fields/v8/keyword_view) +- [geographic](https://developers.google.com/google-ads/api/fields/v8/geographic_view) Note that `ad_groups`, `ad_group_ads`, and `campaigns` contain a `labels` field, which should be joined against their respective `*_labels` streams if you want to view the actual labels. For example, the `ad_groups` stream contains an `ad_group.labels` field, which you would join against the `ad_group_labels` stream's `label.resource_name` field. 
### Report Tables -* [account\_performance\_report](https://developers.google.com/google-ads/api/docs/migration/mapping#account_performance) -* [ad\_group\_ad\_report](https://developers.google.com/google-ads/api/docs/migration/mapping#ad_performance) -* [display\_keyword\_report](https://developers.google.com/google-ads/api/docs/migration/mapping#display_keyword_performance) -* [display\_topics\_report](https://developers.google.com/google-ads/api/docs/migration/mapping#display_topics_performance) -* [shopping\_performance\_report](https://developers.google.com/google-ads/api/docs/migration/mapping#shopping_performance) -* [user_location_report](https://developers.google.com/google-ads/api/fields/v8/user_location_view) +- [account_performance_report](https://developers.google.com/google-ads/api/docs/migration/mapping#account_performance) +- [ad_group_ad_report](https://developers.google.com/google-ads/api/docs/migration/mapping#ad_performance) +- [display_keyword_report](https://developers.google.com/google-ads/api/docs/migration/mapping#display_keyword_performance) +- [display_topics_report](https://developers.google.com/google-ads/api/docs/migration/mapping#display_topics_performance) +- [shopping_performance_report](https://developers.google.com/google-ads/api/docs/migration/mapping#shopping_performance) +- [user_location_report](https://developers.google.com/google-ads/api/fields/v8/user_location_view) **Note**: Due to constraints from the Google Ads API, the `click_view` stream retrieves data one day at a time and can only retrieve data newer than 90 days ago @@ -64,13 +64,14 @@ Note that `ad_groups`, `ad_group_ads`, and `campaigns` contain a `labels` field, Google Ads Account with an approved Developer Token. \(note: In order to get API access to Google Ads, you must have a "manager" account; standard accounts cannot generate a Developer Token. This manager account must be created separately from your standard account. You can find more information about this distinction in the [Google Ads docs](https://support.google.com/google-ads/answer/6139186).\) You'll need to find these values. See the [setup guide](#setup-guide) for instructions. -* developer\_token -* client\_id -* client\_secret -* refresh\_token -* start\_date -* customer\_id -* login\_customer\_id \(you can find more information about this field in [Google Ads docs](https://developers.google.com/google-ads/api/docs/concepts/call-structure#cid)\) + +- developer_token +- client_id +- client_secret +- refresh_token +- start_date +- customer_id +- login_customer_id \(you can find more information about this field in [Google Ads docs](https://developers.google.com/google-ads/api/docs/concepts/call-structure#cid)\) ### Setup guide @@ -79,7 +80,7 @@ This guide will provide information as if starting from scratch. Please skip ove 1. Create an Google Ads Account. Here are [Google's instruction](https://support.google.com/google-ads/answer/6366720) on how to create one. 2. Create an Google Ads MANAGER Account. Here are [Google's instruction](https://ads.google.com/home/tools/manager-accounts/) on how to create one. 3. You should now have two Google Ads accounts: a normal account and a manager account. Link the Manager account to the normal account following [Google's documentation](https://support.google.com/google-ads/answer/7459601). -4. Apply for a developer token \(**make sure you follow our** [**instructions**](google-ads.md#how-to-apply-for-the-developer-token)\) on your Manager account. 
This token allows you to access your data from the Google Ads API. Here are [Google's instructions](https://developers.google.com/google-ads/api/docs/first-call/dev-token). The docs are a little unclear on this point, but you will _not_ be able to access your data via the Google Ads API until this token is approved. You cannot use a test developer token, it has to be at least a basic developer token. It usually takes Google 24 hours to respond to these applications. This developer token is the value you will use in the `developer_token` field. +4. Apply for a developer token \(**make sure you follow our** [**instructions**](google-ads.md#how-to-apply-for-the-developer-token)\) on your Manager account. This token allows you to access your data from the Google Ads API. Here are [Google's instructions](https://developers.google.com/google-ads/api/docs/first-call/dev-token). The docs are a little unclear on this point, but you will _not_ be able to access your data via the Google Ads API until this token is approved. You cannot use a test developer token, it has to be at least a basic developer token. It usually takes Google 24 hours to respond to these applications. This developer token is the value you will use in the `developer_token` field. 5. Fetch your `client_id`, `client_secret`, and `refresh_token`. Google provides [instructions](https://developers.google.com/google-ads/api/docs/first-call/overview) on how to do this. 6. Select your `customer_id`. The `customer_id` refers to the id of each of your Google Ads accounts. This is the 10 digit number in the top corner of the page when you are in Google Ads UI. The source will only pull data from the accounts for which you provide an id. If you are having trouble finding it, check out [Google's instructions](https://support.google.com/google-ads/answer/1704344). 
@@ -91,10 +92,10 @@ Google is very picky about which software and which use case can get access to a When you apply for a token, you need to mention: -* Why you need the token \(eg: want to run some internal analytics...\) -* That you will be using the Airbyte Open Source project -* That you have full access to the code base \(because we're open source\) -* That you have full access to the server running the code \(because you're self-hosting Airbyte\) +- Why you need the token \(eg: want to run some internal analytics...\) +- That you will be using the Airbyte Open Source project +- That you have full access to the code base \(because we're open source\) +- That you have full access to the server running the code \(because you're self-hosting Airbyte\) #### Understanding Google Ads Query Language @@ -106,9 +107,10 @@ This source is constrained by whatever API limits are set for the Google Ads tha ## CHANGELOG -| Version | Date | Pull Request | Subject | -|:---------|:-----------| :--- |:---------------------------------------------------------------------------------------------| -| `0.1.35` | 2022-04-18 | [9310](https://github.com/airbytehq/airbyte/pull/9310) | Add new fields to reports | +| Version | Date | Pull Request | Subject | +| :------- | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------------------------------- | +| `0.1.36` | 2022-04-19 | [12158](https://github.com/airbytehq/airbyte/pull/12158) | Fix `*_labels` streams data type | +| `0.1.35` | 2022-04-18 | [9310](https://github.com/airbytehq/airbyte/pull/9310) | Add new fields to reports | | `0.1.34` | 2022-03-29 | [11602](https://github.com/airbytehq/airbyte/pull/11602) | Add budget amount to campaigns stream. | | `0.1.33` | 2022-03-29 | [11513](https://github.com/airbytehq/airbyte/pull/11513) | When `end_date` is configured in the future, use today's date instead. 
| | `0.1.32` | 2022-03-24 | [11371](https://github.com/airbytehq/airbyte/pull/11371) | Improve how connection check returns error messages | From aab15334745b4ae8d841d5543c5fef681cb94344 Mon Sep 17 00:00:00 2001 From: Andrii Leonets <30464745+DoNotPanicUA@users.noreply.github.com> Date: Thu, 28 Apr 2022 18:26:48 +0300 Subject: [PATCH 020/152] Pubsub, Pulsar, Redis, Redshift, Rocket destinations : Enable DAT tests (#12143) * enable DAT tests for Pulsar * Enable DAT test for pubsub, redis, redshift, rocket * format * fix normalized data fetch * cover "other" result type for arrays * remove deserialization because now we have already parsed node * fix bugspot * fix unicode case --- .../PubsubDestinationAcceptanceTest.java | 22 ++++++ .../pulsar/PulsarRecordConsumer.java | 3 +- .../PulsarDestinationAcceptanceTest.java | 22 ++++++ .../redis/RedisDestinationAcceptanceTest.java | 22 ++++++ ...RedshiftCopyDestinationAcceptanceTest.java | 69 ++++++++++++++----- .../redshift/RedshiftTestDataComparator.java | 57 +++++++++++++++ .../RocksetDestinationAcceptanceTest.java | 22 ++++++ 7 files changed, 200 insertions(+), 17 deletions(-) create mode 100644 airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftTestDataComparator.java diff --git a/airbyte-integrations/connectors/destination-pubsub/src/test-integration/java/io/airbyte/integrations/destination/pubsub/PubsubDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-pubsub/src/test-integration/java/io/airbyte/integrations/destination/pubsub/PubsubDestinationAcceptanceTest.java index a85de5db194522..aaadb701bd9fbe 100644 --- a/airbyte-integrations/connectors/destination-pubsub/src/test-integration/java/io/airbyte/integrations/destination/pubsub/PubsubDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-pubsub/src/test-integration/java/io/airbyte/integrations/destination/pubsub/PubsubDestinationAcceptanceTest.java @@ -38,6 +38,8 @@ import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.io.ByteArrayInputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -91,6 +93,26 @@ private AirbyteStreamNameNamespacePair fromJsonNode(final JsonNode j) { return new AirbyteStreamNameNamespacePair(stream, namespace); } + @Override + protected TestDataComparator getTestDataComparator() { + return new AdvancedTestDataComparator(); + } + + @Override + protected boolean supportBasicDataTypeTest() { + return true; + } + + @Override + protected boolean supportArrayDataTypeTest() { + return true; + } + + @Override + protected boolean supportObjectDataTypeTest() { + return true; + } + @Override protected List retrieveRecords(final TestDestinationEnv testEnv, final String streamName, diff --git a/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumer.java b/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumer.java index c22ac5c056c5bc..129b9e86348cb5 100644 --- 
a/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumer.java +++ b/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumer.java @@ -11,6 +11,7 @@ import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; import java.util.Optional; @@ -69,7 +70,7 @@ protected void acceptTracked(final AirbyteMessage airbyteMessage) { .set(PulsarDestination.COLUMN_NAME_AB_ID, key) .set(PulsarDestination.COLUMN_NAME_STREAM, recordMessage.getStream()) .set(PulsarDestination.COLUMN_NAME_EMITTED_AT, recordMessage.getEmittedAt()) - .set(PulsarDestination.COLUMN_NAME_DATA, recordMessage.getData().toString().getBytes()) + .set(PulsarDestination.COLUMN_NAME_DATA, recordMessage.getData().toString().getBytes(StandardCharsets.UTF_8)) .build(); sendRecord(producer, value); diff --git a/airbyte-integrations/connectors/destination-pulsar/src/test-integration/java/io/airbyte/integrations/destination/pulsar/PulsarDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-pulsar/src/test-integration/java/io/airbyte/integrations/destination/pulsar/PulsarDestinationAcceptanceTest.java index 26dae59de485cb..f31ec96dbfa61a 100644 --- a/airbyte-integrations/connectors/destination-pulsar/src/test-integration/java/io/airbyte/integrations/destination/pulsar/PulsarDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-pulsar/src/test-integration/java/io/airbyte/integrations/destination/pulsar/PulsarDestinationAcceptanceTest.java @@ -15,6 +15,8 @@ import io.airbyte.integrations.destination.NamingConventionTransformer; import io.airbyte.integrations.destination.StandardNameTransformer; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.io.IOException; import java.net.InetAddress; import java.net.NetworkInterface; @@ -103,6 +105,26 @@ protected String getDefaultSchema(final JsonNode config) { return ""; } + @Override + protected TestDataComparator getTestDataComparator() { + return new AdvancedTestDataComparator(); + } + + @Override + protected boolean supportBasicDataTypeTest() { + return true; + } + + @Override + protected boolean supportArrayDataTypeTest() { + return true; + } + + @Override + protected boolean supportObjectDataTypeTest() { + return true; + } + @Override protected List retrieveNormalizedRecords(final TestDestinationEnv testEnv, final String streamName, final String namespace) throws IOException { diff --git a/airbyte-integrations/connectors/destination-redis/src/test-integration/java/io/airbyte/integrations/destination/redis/RedisDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-redis/src/test-integration/java/io/airbyte/integrations/destination/redis/RedisDestinationAcceptanceTest.java index ca80552dc3cb64..9dcc2312e2fcc8 100644 --- a/airbyte-integrations/connectors/destination-redis/src/test-integration/java/io/airbyte/integrations/destination/redis/RedisDestinationAcceptanceTest.java +++ 
b/airbyte-integrations/connectors/destination-redis/src/test-integration/java/io/airbyte/integrations/destination/redis/RedisDestinationAcceptanceTest.java @@ -7,6 +7,8 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; @@ -68,6 +70,26 @@ protected boolean implementsNamespaces() { return true; } + @Override + protected TestDataComparator getTestDataComparator() { + return new AdvancedTestDataComparator(); + } + + @Override + protected boolean supportBasicDataTypeTest() { + return true; + } + + @Override + protected boolean supportArrayDataTypeTest() { + return true; + } + + @Override + protected boolean supportObjectDataTypeTest() { + return true; + } + @Override protected List retrieveRecords(TestDestinationEnv testEnv, String streamName, diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java index f0e729a1ca5ee5..231252bb6b1a76 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java @@ -5,20 +5,24 @@ package io.airbyte.integrations.destination.redshift; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; import io.airbyte.db.Database; import io.airbyte.db.Databases; -import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.nio.file.Path; import java.sql.SQLException; -import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; +import org.jooq.Record; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Integration test testing {@link RedshiftCopyS3Destination}. The default Redshift integration test @@ -26,6 +30,8 @@ */ public class RedshiftCopyDestinationAcceptanceTest extends DestinationAcceptanceTest { + private static final Logger LOGGER = LoggerFactory.getLogger(RedshiftCopyDestinationAcceptanceTest.class); + // config from which to create / delete schemas. private JsonNode baseConfig; // config which refers to the schema that the test is being run in. 
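// Context for the hunks below: rather than round-tripping rows through jOOQ's formatJSON,
// each record is rebuilt field-by-field into an ObjectNode, because array- and object-typed
// columns come back from Redshift as varchar/"other" values that still have to be parsed into
// JSON nodes (the "cover 'other' result type for arrays" and "remove deserialization" items in
// the commit message). A rough restatement of the heuristic used in getJsonFromRecord below,
// where non-ASCII characters are stripped only for the shape test and the original, unstripped
// value is what gets deserialized:
//
//   final String stripped = stringValue.replaceAll("[^\\x00-\\x7F]", "");
//   if (stripped.matches("^\\[.*\\]$") || stripped.matches("^\\{.*\\}$")) {
//     node.set(field.getName(), Jsons.deserialize(stringValue)); // JSON array or object
//   } else {
//     node.put(field.getName(), stringValue);                    // plain text
//   }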
@@ -34,6 +40,8 @@ public class RedshiftCopyDestinationAcceptanceTest extends DestinationAcceptance protected TestDestinationEnv testDestinationEnv; + private final ObjectMapper mapper = new ObjectMapper(); + @Override protected String getImageName() { return "airbyte/destination-redshift:dev"; @@ -55,6 +63,26 @@ protected JsonNode getFailCheckConfig() { return invalidConfig; } + @Override + protected TestDataComparator getTestDataComparator() { + return new RedshiftTestDataComparator(); + } + + @Override + protected boolean supportBasicDataTypeTest() { + return true; + } + + @Override + protected boolean supportArrayDataTypeTest() { + return true; + } + + @Override + protected boolean supportObjectDataTypeTest() { + return true; + } + @Override protected List retrieveRecords(final TestDestinationEnv env, final String streamName, @@ -63,7 +91,7 @@ protected List retrieveRecords(final TestDestinationEnv env, throws Exception { return retrieveRecordsFromTable(namingResolver.getRawTableName(streamName), namespace) .stream() - .map(j -> Jsons.deserialize(j.get(JavaBaseConstants.COLUMN_NAME_DATA).asText())) + .map(j -> j.get(JavaBaseConstants.COLUMN_NAME_DATA)) .collect(Collectors.toList()); } @@ -93,17 +121,27 @@ protected List retrieveNormalizedRecords(final TestDestinationEnv test return retrieveRecordsFromTable(tableName, namespace); } - @Override - protected List resolveIdentifier(final String identifier) { - final List result = new ArrayList<>(); - final String resolved = namingResolver.getIdentifier(identifier); - result.add(identifier); - result.add(resolved); - if (!resolved.startsWith("\"")) { - result.add(resolved.toLowerCase()); - result.add(resolved.toUpperCase()); - } - return result; + private JsonNode getJsonFromRecord(Record record) { + ObjectNode node = mapper.createObjectNode(); + + Arrays.stream(record.fields()).forEach(field -> { + var value = record.get(field); + + switch (field.getDataType().getTypeName()) { + case "varchar", "other": + var stringValue = (value != null ? value.toString() : null); + if (stringValue != null && (stringValue.replaceAll("[^\\x00-\\x7F]", "").matches("^\\[.*\\]$") + || stringValue.replaceAll("[^\\x00-\\x7F]", "").matches("^\\{.*\\}$"))) { + node.set(field.getName(), Jsons.deserialize(stringValue)); + } else { + node.put(field.getName(), stringValue); + } + break; + default: + node.put(field.getName(), (value != null ? 
value.toString() : null)); + } + }); + return node; } private List retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException { @@ -111,8 +149,7 @@ private List retrieveRecordsFromTable(final String tableName, final St ctx -> ctx .fetch(String.format("SELECT * FROM %s.%s ORDER BY %s ASC;", schemaName, tableName, JavaBaseConstants.COLUMN_NAME_EMITTED_AT)) .stream() - .map(r -> r.formatJSON(JdbcUtils.getDefaultJSONFormat())) - .map(Jsons::deserialize) + .map(this::getJsonFromRecord) .collect(Collectors.toList())); } diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftTestDataComparator.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftTestDataComparator.java new file mode 100644 index 00000000000000..6b018e9cd7f4e7 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftTestDataComparator.java @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.redshift; + +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import java.time.DateTimeException; +import java.time.LocalDateTime; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class RedshiftTestDataComparator extends AdvancedTestDataComparator { + + private static final Logger LOGGER = LoggerFactory.getLogger(RedshiftTestDataComparator.class); + + private final RedshiftSQLNameTransformer namingResolver = new RedshiftSQLNameTransformer(); + + protected static final String REDSHIFT_DATETIME_WITH_TZ_FORMAT = "yyyy-MM-dd HH:mm:ssX"; + + @Override + protected ZonedDateTime parseDestinationDateWithTz(String destinationValue) { + return ZonedDateTime.parse(destinationValue, DateTimeFormatter.ofPattern(REDSHIFT_DATETIME_WITH_TZ_FORMAT)).withZoneSameInstant(ZoneOffset.UTC); + } + + @Override + protected boolean compareDateTimeValues(String airbyteMessageValue, String destinationValue) { + try { + var format = DateTimeFormatter.ofPattern(AIRBYTE_DATETIME_FORMAT); + LocalDateTime dateTime = LocalDateTime.parse(destinationValue, DateTimeFormatter.ofPattern(REDSHIFT_DATETIME_WITH_TZ_FORMAT)); + return super.compareDateTimeValues(airbyteMessageValue, format.format(dateTime)); + } catch (DateTimeException e) { + LOGGER.warn("Fail to convert values to DateTime. Try to compare as text. Airbyte value({}), Destination value ({}). 
Exception: {}", + airbyteMessageValue, destinationValue, e); + return compareTextValues(airbyteMessageValue, destinationValue); + } + } + + @Override + protected List resolveIdentifier(final String identifier) { + final List result = new ArrayList<>(); + final String resolved = namingResolver.getIdentifier(identifier); + result.add(identifier); + result.add(resolved); + if (!resolved.startsWith("\"")) { + result.add(resolved.toLowerCase()); + result.add(resolved.toUpperCase()); + } + return result; + } + +} diff --git a/airbyte-integrations/connectors/destination-rockset/src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-rockset/src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java index cfd96d02dffa6b..a0efe58576acc5 100644 --- a/airbyte-integrations/connectors/destination-rockset/src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-rockset/src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java @@ -18,6 +18,8 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.lang.Exceptions; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.io.IOException; import java.nio.file.Path; import java.util.ArrayList; @@ -54,6 +56,26 @@ protected JsonNode getConfig() throws IOException { return Jsons.deserialize(IOs.readFile(Path.of("secrets/config.json"))); } + @Override + protected TestDataComparator getTestDataComparator() { + return new AdvancedTestDataComparator(); + } + + @Override + protected boolean supportBasicDataTypeTest() { + return true; + } + + @Override + protected boolean supportArrayDataTypeTest() { + return true; + } + + @Override + protected boolean supportObjectDataTypeTest() { + return true; + } + @Override protected JsonNode getFailCheckConfig() throws Exception { return Jsons.jsonNode( From e0902e6e30fa467accb8531cf33135ecd84276cd Mon Sep 17 00:00:00 2001 From: Marcos Marx Date: Thu, 28 Apr 2022 13:00:28 -0300 Subject: [PATCH 021/152] :bug: Source Hubspot: correct createAt and updateAd data type (#12424) * correct createAt and updateAd data type * bump connectorversion * bump connector version again --- .../src/main/resources/seed/source_definitions.yaml | 2 +- .../init/src/main/resources/seed/source_specs.yaml | 2 +- .../connectors/source-hubspot/Dockerfile | 2 +- .../source_hubspot/schemas/ticket_pipelines.json | 12 ++++++++---- docs/integrations/sources/hubspot.md | 3 +++ 5 files changed, 14 insertions(+), 7 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 2f1f37afafa69b..05a792432837ab 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -341,7 +341,7 @@ - name: HubSpot sourceDefinitionId: 36c891d9-4bd9-43ac-bad2-10e12756272c dockerRepository: airbyte/source-hubspot - dockerImageTag: 0.1.53 + dockerImageTag: 0.1.55 documentationUrl: https://docs.airbyte.io/integrations/sources/hubspot icon: hubspot.svg sourceType: 
api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 8830d25e2ba52f..ab3c619826b949 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -3480,7 +3480,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-hubspot:0.1.53" +- dockerImage: "airbyte/source-hubspot:0.1.55" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/hubspot" connectionSpecification: diff --git a/airbyte-integrations/connectors/source-hubspot/Dockerfile b/airbyte-integrations/connectors/source-hubspot/Dockerfile index d7b8bd20b57224..504b0f5d2164d4 100644 --- a/airbyte-integrations/connectors/source-hubspot/Dockerfile +++ b/airbyte-integrations/connectors/source-hubspot/Dockerfile @@ -34,5 +34,5 @@ COPY source_hubspot ./source_hubspot ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.53 +LABEL io.airbyte.version=0.1.55 LABEL io.airbyte.name=airbyte/source-hubspot diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/ticket_pipelines.json b/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/ticket_pipelines.json index b360fb642613eb..5d75223f315341 100644 --- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/ticket_pipelines.json +++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/ticket_pipelines.json @@ -37,10 +37,12 @@ "type": ["null", "string"] }, "createdAt": { - "type": ["null", "integer"] + "type": ["null", "string"], + "format": "date-time" }, "updatedAt": { - "type": ["null", "integer"] + "type": ["null", "string"], + "format": "date-time" }, "active": { "type": ["null", "boolean"] } } }, "createdAt": { - "type": ["null", "integer"] + "type": ["null", "string"], + "format": "date-time" }, "updatedAt": { - "type": ["null", "integer"] + "type": ["null", "string"], + "format": "date-time" } } } diff --git a/docs/integrations/sources/hubspot.md b/docs/integrations/sources/hubspot.md index ffabe35eca3df0..353218e67d6142 100644 --- a/docs/integrations/sources/hubspot.md +++ b/docs/integrations/sources/hubspot.md @@ -147,6 +147,9 @@ If you are using OAuth, most of the streams require the appropriate [scopes](htt | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------| +| 0.1.55 | 2022-04-28 | [12424](https://github.com/airbytehq/airbyte/pull/12424) | Correct schema for ticket_pipeline stream | +| 0.1.54 | 2022-04-28 | [12335](https://github.com/airbytehq/airbyte/pull/12335) | Mock time.sleep in unit tests | +| 0.1.53 | 2022-04-20 | [12230](https://github.com/airbytehq/airbyte/pull/12230) | Change spec json to yaml format | | 0.1.52 | 2022-03-25 | [11423](https://github.com/airbytehq/airbyte/pull/11423) | Add tickets associations to engagements streams | | 0.1.51 | 2022-03-24 | [11321](https://github.com/airbytehq/airbyte/pull/11321) | Fix updated at field non exists issue | | 0.1.50 | 2022-03-22 | [11266](https://github.com/airbytehq/airbyte/pull/11266) | Fix Engagements Stream Pagination | From 
5a0666d75aefede1e15188a4fdb41024a01d78cc Mon Sep 17 00:00:00 2001 From: Edmundo Ruiz Ghanem <168664+edmundito@users.noreply.github.com> Date: Thu, 28 Apr 2022 14:20:23 -0400 Subject: [PATCH 022/152] Update "empty" status in StatusIcon to "sleep" with new icon (#12436) * Update status icon empty state with new Moon icon Add StatusIcon storybook * Move PauseIcon to components/icons * Fix support for big status icon for inactive and empty * Rename status icon empty state to sleep --- .../components/AllConnectionsStatusCell.tsx | 2 +- .../EntityTable/components/NameCell.tsx | 2 +- .../components/StatusIcon/StatusIcon.test.tsx | 2 +- .../src/components/StatusIcon/StatusIcon.tsx | 31 ++++++++++++------- .../components/StatusIcon/index.stories.tsx | 16 ++++++++++ .../src/components/icons/MoonIcon.tsx | 15 +++++++++ .../{StatusIcon => icons}/PauseIcon.tsx | 2 +- 7 files changed, 55 insertions(+), 15 deletions(-) create mode 100644 airbyte-webapp/src/components/StatusIcon/index.stories.tsx create mode 100644 airbyte-webapp/src/components/icons/MoonIcon.tsx rename airbyte-webapp/src/components/{StatusIcon => icons}/PauseIcon.tsx (80%) diff --git a/airbyte-webapp/src/components/EntityTable/components/AllConnectionsStatusCell.tsx b/airbyte-webapp/src/components/EntityTable/components/AllConnectionsStatusCell.tsx index 46b01f149e1a29..2479bb7035ec45 100644 --- a/airbyte-webapp/src/components/EntityTable/components/AllConnectionsStatusCell.tsx +++ b/airbyte-webapp/src/components/EntityTable/components/AllConnectionsStatusCell.tsx @@ -10,7 +10,7 @@ const _statusConfig: { status: Status; statusIconStatus?: StatusIconStatus; titl { status: Status.ACTIVE, statusIconStatus: "success", titleId: "connection.successSync" }, { status: Status.INACTIVE, statusIconStatus: "inactive", titleId: "connection.disabledConnection" }, { status: Status.FAILED, titleId: "connection.failedSync" }, - { status: Status.EMPTY, statusIconStatus: "empty", titleId: "connection.noSyncData" }, + { status: Status.EMPTY, statusIconStatus: "sleep", titleId: "connection.noSyncData" }, ]; interface AllConnectionStatusConnectEntity { diff --git a/airbyte-webapp/src/components/EntityTable/components/NameCell.tsx b/airbyte-webapp/src/components/EntityTable/components/NameCell.tsx index f9f047bd224d59..a628e9915c7855 100644 --- a/airbyte-webapp/src/components/EntityTable/components/NameCell.tsx +++ b/airbyte-webapp/src/components/EntityTable/components/NameCell.tsx @@ -45,7 +45,7 @@ const NameCell: React.FC = ({ value, enabled, status, icon, img }) => { const statusIconStatus = useMemo( () => status === Status.EMPTY - ? "empty" + ? "sleep" : status === Status.ACTIVE ? 
"success" : status === Status.INACTIVE diff --git a/airbyte-webapp/src/components/StatusIcon/StatusIcon.test.tsx b/airbyte-webapp/src/components/StatusIcon/StatusIcon.test.tsx index 2a04b50e3865da..23f989ffac89f1 100644 --- a/airbyte-webapp/src/components/StatusIcon/StatusIcon.test.tsx +++ b/airbyte-webapp/src/components/StatusIcon/StatusIcon.test.tsx @@ -17,7 +17,7 @@ describe("", () => { const statusCases: { status: StatusIconStatus; icon: string }[] = [ { status: "success", icon: "check" }, { status: "inactive", icon: "pause" }, - { status: "empty", icon: "ban" }, + { status: "sleep", icon: "moon" }, { status: "warning", icon: "triangle-exclamation" }, { status: "loading", icon: "circle-loader" }, ]; diff --git a/airbyte-webapp/src/components/StatusIcon/StatusIcon.tsx b/airbyte-webapp/src/components/StatusIcon/StatusIcon.tsx index 9bd0b986ad6a3c..0e990344ad9b48 100644 --- a/airbyte-webapp/src/components/StatusIcon/StatusIcon.tsx +++ b/airbyte-webapp/src/components/StatusIcon/StatusIcon.tsx @@ -3,12 +3,14 @@ import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import React from "react"; import styled from "styled-components"; +import { MoonIcon } from "components/icons/MoonIcon"; + +import PauseIcon from "../icons/PauseIcon"; import CircleLoader from "./CircleLoader"; -import PauseIcon from "./PauseIcon"; -export type StatusIconStatus = "empty" | "inactive" | "success" | "warning" | "loading"; +export type StatusIconStatus = "sleep" | "inactive" | "success" | "warning" | "loading"; -interface Props { +interface StatusIconProps { className?: string; status?: StatusIconStatus; title?: string; @@ -16,22 +18,22 @@ interface Props { value?: string | number; } -const getBadgeWidth = (props: Props) => (props.big ? (props.value ? 57 : 40) : props.value ? 37 : 20); +const getBadgeWidth = (props: StatusIconProps) => (props.big ? (props.value ? 57 : 40) : props.value ? 37 : 20); const _iconByStatus: Partial> = { - empty: faBan, + sleep: faBan, success: faCheck, warning: faExclamationTriangle, }; const _themeByStatus: Partial> = { - empty: "attentionColor", + sleep: "lightTextColor", inactive: "lightTextColor", success: "successColor", warning: "warningColor", }; -const Container = styled.div` +const Container = styled.div` width: ${(props) => getBadgeWidth(props)}px; height: ${({ big }) => (big ? 40 : 20)}px; margin-right: 10px; @@ -39,14 +41,19 @@ const Container = styled.div` line-height: ${({ big }) => (big ? 33 : 12)}px; text-align: center; display: inline-block; - vertical-align: top; + vertical-align: middle; `; -const Badge = styled(Container)` +const Badge = styled(Container)` background: ${(props) => props.theme[(props.status && _themeByStatus[props.status]) || "dangerColor"]}; border-radius: ${({ value }) => (value ? "15px" : "50%")}; color: ${({ theme }) => theme.whiteColor}; - padding-top: 4px; + padding-top: ${({ status }) => (status === "warning" || status === "inactive" ? 3 : 4)}px; + + > svg { + height: 1em; + vertical-align: -0.125em; + } `; const Value = styled.span` @@ -56,7 +63,7 @@ const Value = styled.span` vertical-align: top; `; -const StatusIcon: React.FC = ({ title, status, ...props }) => { +const StatusIcon: React.FC = ({ title, status, ...props }) => { const valueElement = props.value ? {props.value} : null; if (status === "loading") { @@ -72,6 +79,8 @@ const StatusIcon: React.FC = ({ title, status, ...props }) => { {status === "inactive" ? ( + ) : status === "sleep" ? 
( + ) : ( )} diff --git a/airbyte-webapp/src/components/StatusIcon/index.stories.tsx b/airbyte-webapp/src/components/StatusIcon/index.stories.tsx new file mode 100644 index 00000000000000..c34a842db828a7 --- /dev/null +++ b/airbyte-webapp/src/components/StatusIcon/index.stories.tsx @@ -0,0 +1,16 @@ +import { ComponentStory, ComponentMeta } from "@storybook/react"; + +import StatusIconComponent from "./StatusIcon"; + +export default { + title: "Ui/StatusIcon", + component: StatusIconComponent, + argTypes: {}, +} as ComponentMeta; + +const Template: ComponentStory = (args) => ; + +export const StatusIcon = Template.bind({}); +StatusIcon.args = { + status: "success", +}; diff --git a/airbyte-webapp/src/components/icons/MoonIcon.tsx b/airbyte-webapp/src/components/icons/MoonIcon.tsx new file mode 100644 index 00000000000000..6e97f0b5036c23 --- /dev/null +++ b/airbyte-webapp/src/components/icons/MoonIcon.tsx @@ -0,0 +1,15 @@ +interface MoonProps { + title?: string; +} + +export const MoonIcon = ({ title }: MoonProps): JSX.Element => ( + + {title && {title}} + + +); diff --git a/airbyte-webapp/src/components/StatusIcon/PauseIcon.tsx b/airbyte-webapp/src/components/icons/PauseIcon.tsx similarity index 80% rename from airbyte-webapp/src/components/StatusIcon/PauseIcon.tsx rename to airbyte-webapp/src/components/icons/PauseIcon.tsx index a602780b309fc0..1b08b1417b2620 100644 --- a/airbyte-webapp/src/components/StatusIcon/PauseIcon.tsx +++ b/airbyte-webapp/src/components/icons/PauseIcon.tsx @@ -4,7 +4,7 @@ interface Props { } const PauseIcon = ({ color = "currentColor", title }: Props): JSX.Element => ( - + {title && {title}} From 4a9f663565fab497638ebb30a063fe76f0b777a4 Mon Sep 17 00:00:00 2001 From: noahkawasaki-airbyte <103465980+noahkawasaki-airbyte@users.noreply.github.com> Date: Thu, 28 Apr 2022 11:25:31 -0700 Subject: [PATCH 023/152] Add experimental build-connector and publish-connector slash commands (#12442) --- .github/workflows/slash-commands.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/slash-commands.yml b/.github/workflows/slash-commands.yml index 6f01c940e447eb..1c2aed0b941e13 100644 --- a/.github/workflows/slash-commands.yml +++ b/.github/workflows/slash-commands.yml @@ -22,6 +22,8 @@ jobs: commands: | test test-performance + build-connector + publish-connector publish publish-external publish-cdk From bb2da42fded8db6632447394d44b9bfa6b2b1ceb Mon Sep 17 00:00:00 2001 From: Brian Lai <51336873+brianjlai@users.noreply.github.com> Date: Thu, 28 Apr 2022 12:21:45 -0700 Subject: [PATCH 024/152] source freshdesk, hubspot, mixpanel, paypal-transaction, salesforce, zendesk-support: adding fixtures to mock time.sleep for connectors that explicitly sleep (#12335) * adding fixtures to mock time.sleep for connectors that explicitly sleep * bump connector versions * update changelog doc for each connector * auto-bump connector version * auto-bump connector version * auto-bump connector version * auto-bump connector version * auto-bump connector version * remove version bump for freshdesk because connector tests are in a bad state Co-authored-by: Octavia Squidington III --- .../resources/seed/source_definitions.yaml | 8 ++-- .../src/main/resources/seed/source_specs.yaml | 8 ++-- .../connectors/source-freshdesk/setup.py | 1 + .../unit_tests/test_client.py | 7 ++++ .../connectors/source-hubspot/setup.py | 1 + .../source-hubspot/unit_tests/test_source.py | 6 +++ .../source-hubspot/unit_tests/test_streams.py | 6 +++ .../connectors/source-mixpanel/Dockerfile | 2 +- 
.../unit_tests/test_streams.py | 6 +++ .../source-paypal-transaction/Dockerfile | 2 +- .../source-paypal-transaction/setup.py | 1 + .../unit_tests/unit_test.py | 7 ++++ .../connectors/source-salesforce/Dockerfile | 2 +- .../connectors/source-salesforce/setup.py | 2 +- .../source_salesforce/streams.py | 1 + .../source-salesforce/unit_tests/api_test.py | 9 +++- .../unit_tests/discovery_test.py | 6 +++ .../unit_tests/test_memory.py | 6 +++ .../source-zendesk-support/Dockerfile | 2 +- .../source-zendesk-support/setup.py | 2 +- .../unit_tests/test_futures.py | 6 +++ .../unit_tests/unit_test.py | 6 +++ docs/integrations/sources/freshdesk.md | 14 +++---- docs/integrations/sources/mixpanel.md | 1 + .../sources/paypal-transaction.md | 15 +++---- docs/integrations/sources/salesforce.md | 3 +- docs/integrations/sources/zendesk-support.md | 41 ++++++++++--------- 27 files changed, 121 insertions(+), 50 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 05a792432837ab..7a6d3fba64f361 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -473,7 +473,7 @@ - name: Mixpanel sourceDefinitionId: 12928b32-bf0a-4f1e-964f-07e12e37153a dockerRepository: airbyte/source-mixpanel - dockerImageTag: 0.1.12 + dockerImageTag: 0.1.13 documentationUrl: https://docs.airbyte.io/integrations/sources/mixpanel icon: mixpanel.svg sourceType: api @@ -563,7 +563,7 @@ - name: Paypal Transaction sourceDefinitionId: d913b0f2-cc51-4e55-a44c-8ba1697b9239 dockerRepository: airbyte/source-paypal-transaction - dockerImageTag: 0.1.4 + dockerImageTag: 0.1.5 documentationUrl: https://docs.airbyte.io/integrations/sources/paypal-transaction icon: paypal.svg sourceType: api @@ -695,7 +695,7 @@ - name: Salesforce sourceDefinitionId: b117307c-14b6-41aa-9422-947e34922962 dockerRepository: airbyte/source-salesforce - dockerImageTag: 1.0.5 + dockerImageTag: 1.0.6 documentationUrl: https://docs.airbyte.io/integrations/sources/salesforce icon: salesforce.svg sourceType: api @@ -862,7 +862,7 @@ - name: Zendesk Support sourceDefinitionId: 79c1aa37-dae3-42ae-b333-d1c105477715 dockerRepository: airbyte/source-zendesk-support - dockerImageTag: 0.2.6 + dockerImageTag: 0.2.7 documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-support icon: zendesk.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index ab3c619826b949..dc76241ec531b3 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -5024,7 +5024,7 @@ path_in_connector_config: - "credentials" - "client_secret" -- dockerImage: "airbyte/source-mixpanel:0.1.12" +- dockerImage: "airbyte/source-mixpanel:0.1.13" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/mixpanel" connectionSpecification: @@ -6122,7 +6122,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-paypal-transaction:0.1.4" +- dockerImage: "airbyte/source-paypal-transaction:0.1.5" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/paypal-transactions" connectionSpecification: @@ -7346,7 +7346,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: 
"airbyte/source-salesforce:1.0.5" +- dockerImage: "airbyte/source-salesforce:1.0.6" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/salesforce" connectionSpecification: @@ -9328,7 +9328,7 @@ path_in_connector_config: - "credentials" - "client_secret" -- dockerImage: "airbyte/source-zendesk-support:0.2.6" +- dockerImage: "airbyte/source-zendesk-support:0.2.7" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/zendesk-support" connectionSpecification: diff --git a/airbyte-integrations/connectors/source-freshdesk/setup.py b/airbyte-integrations/connectors/source-freshdesk/setup.py index bf1886fd6a9c18..323f191ae8db2b 100644 --- a/airbyte-integrations/connectors/source-freshdesk/setup.py +++ b/airbyte-integrations/connectors/source-freshdesk/setup.py @@ -14,6 +14,7 @@ TEST_REQUIREMENTS = [ "pytest==6.1.2", + "pytest-mock~=3.6", "requests_mock==1.8.0", "source-acceptance-test", ] diff --git a/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_client.py b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_client.py index 7c6b171ac7d592..9219fd64be9a48 100644 --- a/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_client.py +++ b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_client.py @@ -5,11 +5,18 @@ from pathlib import Path +from pytest import fixture from source_freshdesk.client import Client HERE = Path(__file__).parent.absolute() +@fixture(autouse=True) +def time_sleep_mock(mocker): + time_mock = mocker.patch("time.sleep", lambda x: None) + yield time_mock + + def test_client_backoff_on_limit_reached(requests_mock): """Error once, check that we retry and not fail""" responses = [ diff --git a/airbyte-integrations/connectors/source-hubspot/setup.py b/airbyte-integrations/connectors/source-hubspot/setup.py index cf39bd55635066..76a85a488bf9f8 100644 --- a/airbyte-integrations/connectors/source-hubspot/setup.py +++ b/airbyte-integrations/connectors/source-hubspot/setup.py @@ -14,6 +14,7 @@ TEST_REQUIREMENTS = [ "pytest==6.1.2", + "pytest-mock~=3.6", "requests_mock==1.8.0", "source-acceptance-test", ] diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py index b59ce456040bba..54b1722420f338 100644 --- a/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py @@ -29,6 +29,12 @@ logger = logging.getLogger("test_client") +@pytest.fixture(autouse=True) +def time_sleep_mock(mocker): + time_mock = mocker.patch("time.sleep", lambda x: None) + yield time_mock + + def test_check_connection_ok(requests_mock, config): responses = [ {"json": [], "status_code": 200}, diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_streams.py index 256a2ff3bd67cb..d3016f5d500a0b 100644 --- a/airbyte-integrations/connectors/source-hubspot/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_streams.py @@ -33,6 +33,12 @@ from .utils import read_full_refresh, read_incremental +@pytest.fixture(autouse=True) +def time_sleep_mock(mocker): + time_mock = mocker.patch("time.sleep", lambda x: None) + yield time_mock + + def test_updated_at_field_non_exist_handler(requests_mock, common_params, fake_properties_list): stream = ContactLists(**common_params) diff --git 
a/airbyte-integrations/connectors/source-mixpanel/Dockerfile b/airbyte-integrations/connectors/source-mixpanel/Dockerfile index 99bea641bf4f32..1e2c0c416bc6db 100644 --- a/airbyte-integrations/connectors/source-mixpanel/Dockerfile +++ b/airbyte-integrations/connectors/source-mixpanel/Dockerfile @@ -13,5 +13,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.12 +LABEL io.airbyte.version=0.1.13 LABEL io.airbyte.name=airbyte/source-mixpanel diff --git a/airbyte-integrations/connectors/source-mixpanel/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-mixpanel/unit_tests/test_streams.py index 71caf4b9b6fdf7..aa2623f83ea9ae 100644 --- a/airbyte-integrations/connectors/source-mixpanel/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-mixpanel/unit_tests/test_streams.py @@ -41,6 +41,12 @@ def patch_incremental_base_class(mocker): mocker.patch.object(IncrementalMixpanelStream, "__abstractmethods__", set()) +@pytest.fixture(autouse=True) +def time_sleep_mock(mocker): + time_mock = mocker.patch("time.sleep", lambda x: None) + yield time_mock + + def test_url_base(patch_base_class): stream = MixpanelStream(authenticator=MagicMock()) diff --git a/airbyte-integrations/connectors/source-paypal-transaction/Dockerfile b/airbyte-integrations/connectors/source-paypal-transaction/Dockerfile index 3372b968013222..0c89c2ae46500e 100644 --- a/airbyte-integrations/connectors/source-paypal-transaction/Dockerfile +++ b/airbyte-integrations/connectors/source-paypal-transaction/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.4 +LABEL io.airbyte.version=0.1.5 LABEL io.airbyte.name=airbyte/source-paypal-transaction diff --git a/airbyte-integrations/connectors/source-paypal-transaction/setup.py b/airbyte-integrations/connectors/source-paypal-transaction/setup.py index 2ff46a9c8ea387..73c3a8ad9fa8c1 100644 --- a/airbyte-integrations/connectors/source-paypal-transaction/setup.py +++ b/airbyte-integrations/connectors/source-paypal-transaction/setup.py @@ -11,6 +11,7 @@ TEST_REQUIREMENTS = [ "pytest~=6.1", + "pytest-mock~=3.6", "source-acceptance-test", ] diff --git a/airbyte-integrations/connectors/source-paypal-transaction/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-paypal-transaction/unit_tests/unit_test.py index e43122a439b4f9..30f0acf3025f27 100644 --- a/airbyte-integrations/connectors/source-paypal-transaction/unit_tests/unit_test.py +++ b/airbyte-integrations/connectors/source-paypal-transaction/unit_tests/unit_test.py @@ -6,9 +6,16 @@ from airbyte_cdk.sources.streams.http.auth import NoAuth from dateutil.parser import isoparse +from pytest import fixture from source_paypal_transaction.source import Balances, PaypalTransactionStream, Transactions +@fixture(autouse=True) +def time_sleep_mock(mocker): + time_mock = mocker.patch("time.sleep", lambda x: None) + yield time_mock + + def test_get_field(): record = {"a": {"b": {"c": "d"}}} diff --git a/airbyte-integrations/connectors/source-salesforce/Dockerfile b/airbyte-integrations/connectors/source-salesforce/Dockerfile index 9a78445a5d819d..14c1e7efa565f7 100644 --- a/airbyte-integrations/connectors/source-salesforce/Dockerfile +++ b/airbyte-integrations/connectors/source-salesforce/Dockerfile @@ -13,5 +13,5 @@ RUN pip install . 
ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=1.0.5 +LABEL io.airbyte.version=1.0.6 LABEL io.airbyte.name=airbyte/source-salesforce diff --git a/airbyte-integrations/connectors/source-salesforce/setup.py b/airbyte-integrations/connectors/source-salesforce/setup.py index 1d16e4376bbae9..0f0d4635b22ed1 100644 --- a/airbyte-integrations/connectors/source-salesforce/setup.py +++ b/airbyte-integrations/connectors/source-salesforce/setup.py @@ -7,7 +7,7 @@ MAIN_REQUIREMENTS = ["airbyte-cdk", "vcrpy==4.1.1", "pandas"] -TEST_REQUIREMENTS = ["pytest~=6.1", "requests_mock", "source-acceptance-test", "pytest-timeout"] +TEST_REQUIREMENTS = ["pytest~=6.1", "pytest-mock~=3.6", "requests_mock", "source-acceptance-test", "pytest-timeout"] setup( name="source_salesforce", diff --git a/airbyte-integrations/connectors/source-salesforce/source_salesforce/streams.py b/airbyte-integrations/connectors/source-salesforce/source_salesforce/streams.py index 05b23decdf6175..288ba776e9df1c 100644 --- a/airbyte-integrations/connectors/source-salesforce/source_salesforce/streams.py +++ b/airbyte-integrations/connectors/source-salesforce/source_salesforce/streams.py @@ -485,6 +485,7 @@ class Describe(Stream): Stream of sObjects' (Salesforce Objects) describe: https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_describe.htm """ + name = "Describe" primary_key = "name" diff --git a/airbyte-integrations/connectors/source-salesforce/unit_tests/api_test.py b/airbyte-integrations/connectors/source-salesforce/unit_tests/api_test.py index 5b577b9bb2aa8a..cb81bf6e462668 100644 --- a/airbyte-integrations/connectors/source-salesforce/unit_tests/api_test.py +++ b/airbyte-integrations/connectors/source-salesforce/unit_tests/api_test.py @@ -24,6 +24,12 @@ ) +@pytest.fixture(autouse=True) +def time_sleep_mock(mocker): + time_mock = mocker.patch("time.sleep", lambda x: None) + yield time_mock + + def test_bulk_sync_creation_failed(stream_config, stream_api): stream: BulkIncrementalSalesforceStream = generate_stream("Account", stream_config, stream_api) with requests_mock.Mocker() as m: @@ -482,7 +488,8 @@ def test_forwarding_sobject_options(stream_config, stream_names, catalog_stream_ "flag1": True, "queryable": True, } - for stream_name in stream_names if stream_name != "Describe" + for stream_name in stream_names + if stream_name != "Describe" ], }, ) diff --git a/airbyte-integrations/connectors/source-salesforce/unit_tests/discovery_test.py b/airbyte-integrations/connectors/source-salesforce/unit_tests/discovery_test.py index ed3192bfa095f2..0bc7ea48aee530 100644 --- a/airbyte-integrations/connectors/source-salesforce/unit_tests/discovery_test.py +++ b/airbyte-integrations/connectors/source-salesforce/unit_tests/discovery_test.py @@ -9,6 +9,12 @@ from source_salesforce.exceptions import TypeSalesforceException +@pytest.fixture(autouse=True) +def time_sleep_mock(mocker): + time_mock = mocker.patch("time.sleep", lambda x: None) + yield time_mock + + @pytest.mark.parametrize( "streams_criteria,predicted_filtered_streams", [ diff --git a/airbyte-integrations/connectors/source-salesforce/unit_tests/test_memory.py b/airbyte-integrations/connectors/source-salesforce/unit_tests/test_memory.py index 0d3dd27c303109..582f4c440c84a6 100644 --- a/airbyte-integrations/connectors/source-salesforce/unit_tests/test_memory.py +++ b/airbyte-integrations/connectors/source-salesforce/unit_tests/test_memory.py @@ -11,6 +11,12 @@ from source_salesforce.streams import 
BulkIncrementalSalesforceStream +@pytest.fixture(autouse=True) +def time_sleep_mock(mocker): + time_mock = mocker.patch("time.sleep", lambda x: None) + yield time_mock + + @pytest.mark.parametrize( "n_records, first_size, first_peak", ( diff --git a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile index 72bc2ab36ba5ec..6519e236e3820a 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile +++ b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile @@ -25,5 +25,5 @@ COPY source_zendesk_support ./source_zendesk_support ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.6 +LABEL io.airbyte.version=0.2.7 LABEL io.airbyte.name=airbyte/source-zendesk-support diff --git a/airbyte-integrations/connectors/source-zendesk-support/setup.py b/airbyte-integrations/connectors/source-zendesk-support/setup.py index b15858f0bf879b..86ab517a9fad4b 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/setup.py +++ b/airbyte-integrations/connectors/source-zendesk-support/setup.py @@ -7,7 +7,7 @@ MAIN_REQUIREMENTS = ["airbyte-cdk~=0.1.36", "pytz", "requests-futures~=1.0.0", "pendulum~=2.1.2"] -TEST_REQUIREMENTS = ["pytest~=6.1", "source-acceptance-test", "requests-mock==1.9.3"] +TEST_REQUIREMENTS = ["pytest~=6.1", "pytest-mock~=3.6", "source-acceptance-test", "requests-mock==1.9.3"] setup( version="0.1.0", diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/test_futures.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/test_futures.py index fb86fe2cf2b3e0..5afeb5dd9d724d 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/test_futures.py +++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/test_futures.py @@ -21,6 +21,12 @@ } +@pytest.fixture(autouse=True) +def time_sleep_mock(mocker): + time_mock = mocker.patch("time.sleep", lambda x: None) + yield time_mock + + @pytest.mark.parametrize( "records_count,page_size,expected_futures_deque_len", [ diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py index 20847fb78f309e..c5a70147032075 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py +++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py @@ -69,6 +69,12 @@ TEST_STREAM = TicketComments(**STREAM_ARGS) +@pytest.fixture(autouse=True) +def time_sleep_mock(mocker): + time_mock = mocker.patch("time.sleep", lambda x: None) + yield time_mock + + def test_str2datetime(): expected = datetime.strptime(DATETIME_STR, DATETIME_FORMAT) output = BaseSourceZendeskSupportStream.str2datetime(DATETIME_STR) diff --git a/docs/integrations/sources/freshdesk.md b/docs/integrations/sources/freshdesk.md index 7e8b2c224f5a11..ee2eeb8d80cb6c 100644 --- a/docs/integrations/sources/freshdesk.md +++ b/docs/integrations/sources/freshdesk.md @@ -51,11 +51,11 @@ Please read [How to find your API key](https://support.freshdesk.com/support/sol ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-----------| :--- |:-------------------------------------------------------------------------------| -| 0.2.11 | 2021-12-14 | [8682](https://github.com/airbytehq/airbyte/pull/8682) | Migrate to the CDK | -| 0.2.10 | 2021-12-06 
| [8524](https://github.com/airbytehq/airbyte/pull/8524) | Update connector fields title/description | -| 0.2.9 | 2021-11-16 | [8017](https://github.com/airbytehq/airbyte/pull/8017) | Bugfix an issue that caused the connector not to sync more than 50000 contacts | -| 0.2.8 | 2021-10-28 | [7486](https://github.com/airbytehq/airbyte/pull/7486) | Include "requester" and "stats" fields in "tickets" stream | -| 0.2.7 | 2021-10-13 | [6442](https://github.com/airbytehq/airbyte/pull/6442) | Add start_date parameter to specification from which to start pulling data. | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:-------------------------------------------------------------------------------| +| 0.2.11 | 2021-12-14 | [8682](https://github.com/airbytehq/airbyte/pull/8682) | Migrate to the CDK | +| 0.2.10 | 2021-12-06 | [8524](https://github.com/airbytehq/airbyte/pull/8524) | Update connector fields title/description | +| 0.2.9 | 2021-11-16 | [8017](https://github.com/airbytehq/airbyte/pull/8017) | Bugfix an issue that caused the connector not to sync more than 50000 contacts | +| 0.2.8 | 2021-10-28 | [7486](https://github.com/airbytehq/airbyte/pull/7486) | Include "requester" and "stats" fields in "tickets" stream | +| 0.2.7 | 2021-10-13 | [6442](https://github.com/airbytehq/airbyte/pull/6442) | Add start_date parameter to specification from which to start pulling data. | diff --git a/docs/integrations/sources/mixpanel.md b/docs/integrations/sources/mixpanel.md index 0882c750b282b5..debb8bb54f0295 100644 --- a/docs/integrations/sources/mixpanel.md +++ b/docs/integrations/sources/mixpanel.md @@ -59,6 +59,7 @@ Select the correct region \(EU or US\) for your Mixpanel project. See detail [he | Version | Date | Pull Request | Subject | |:---------|:-----------|:---------------------------------------------------------|:-----------------------------------------------------------------------------------------------------| +| `0.1.13` | 2022-04-27 | [12335](https://github.com/airbytehq/airbyte/pull/12335) | Adding fixtures to mock time.sleep for connectors that explicitly sleep | | `0.1.12` | 2022-03-31 | [11633](https://github.com/airbytehq/airbyte/pull/11633) | Increase unit test coverage | | `0.1.11` | 2022-04-04 | [11318](https://github.com/airbytehq/airbyte/pull/11318) | Change Response Reading | | `0.1.10` | 2022-03-31 | [11227](https://github.com/airbytehq/airbyte/pull/11227) | Fix cohort id always null in the cohort_members stream | diff --git a/docs/integrations/sources/paypal-transaction.md b/docs/integrations/sources/paypal-transaction.md index 55ee494452151e..f633d83e220ace 100644 --- a/docs/integrations/sources/paypal-transaction.md +++ b/docs/integrations/sources/paypal-transaction.md @@ -55,11 +55,12 @@ Transactions sync is performed with default `stream_slice_period` = 1 day, it me ## Changelog -| Version | Date | Pull Request | Subject | -| :--- | :--- | :--- | :--- | -| 0.1.4 | 2021-12-22 | [9034](https://github.com/airbytehq/airbyte/pull/9034) | Update connector fields title/description | -| 0.1.3 | 2021-12-16 | [8580](https://github.com/airbytehq/airbyte/pull/8580) | Added more logs during `check connection` stage | -| 0.1.2 | 2021-11-08 | [7499](https://github.com/airbytehq/airbyte/pull/7499) | Remove base-python dependencies | -| 0.1.1 | 2021-08-03 | [5155](https://github.com/airbytehq/airbyte/pull/5155) | fix start\_date\_min limit | -| 0.1.0 | 2021-06-10 | 
[4240](https://github.com/airbytehq/airbyte/pull/4240) | PayPal Transaction Search API | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------------------| +| 0.1.5 | 2022-04-27 | [12335](https://github.com/airbytehq/airbyte/pull/12335) | Adding fixtures to mock time.sleep for connectors that explicitly sleep | +| 0.1.4 | 2021-12-22 | [9034](https://github.com/airbytehq/airbyte/pull/9034) | Update connector fields title/description | +| 0.1.3 | 2021-12-16 | [8580](https://github.com/airbytehq/airbyte/pull/8580) | Added more logs during `check connection` stage | +| 0.1.2 | 2021-11-08 | [7499](https://github.com/airbytehq/airbyte/pull/7499) | Remove base-python dependencies | +| 0.1.1 | 2021-08-03 | [5155](https://github.com/airbytehq/airbyte/pull/5155) | fix start\_date\_min limit | +| 0.1.0 | 2021-06-10 | [4240](https://github.com/airbytehq/airbyte/pull/4240) | PayPal Transaction Search API | diff --git a/docs/integrations/sources/salesforce.md b/docs/integrations/sources/salesforce.md index 951fb528360ad7..a0987ad172078d 100644 --- a/docs/integrations/sources/salesforce.md +++ b/docs/integrations/sources/salesforce.md @@ -121,7 +121,8 @@ Now that you have set up the Salesforce source connector, check out the followin ## Changelog | Version | Date | Pull Request | Subject | -|:--------|:-----------| :--- |:---------------------------------------------------------------------------------------------------------------------------------| +|:--------|:-----------|:---|:---------------------------------------------------------------------------------------------------------------------------------| +| 1.0.6 | 2022-04-27 | [12335](https://github.com/airbytehq/airbyte/pull/12335) | Adding fixtures to mock time.sleep for connectors that explicitly sleep | | 1.0.3 | 2022-04-04 | [11692](https://github.com/airbytehq/airbyte/pull/11692) | Optimised memory usage for `BULK` API calls | | 1.0.2 | 2022-03-01 | [10751](https://github.com/airbytehq/airbyte/pull/10751) | Fix broken link anchor in connector configuration | | 1.0.1 | 2022-02-27 | [10679](https://github.com/airbytehq/airbyte/pull/10679) | Reorganize input parameter order on the UI | diff --git a/docs/integrations/sources/zendesk-support.md b/docs/integrations/sources/zendesk-support.md index 1376d6c0d07fef..ac4aff49d8c2f1 100644 --- a/docs/integrations/sources/zendesk-support.md +++ b/docs/integrations/sources/zendesk-support.md @@ -72,24 +72,25 @@ The Zendesk connector should not run into Zendesk API limitations under normal u ### CHANGELOG -| Version | Date | Pull Request | Subject | -|:---------|:-----------| :----- |:-------------------------------------------------------| +| Version | Date | Pull Request | Subject | +|:---------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------------------| +| `0.2.7` | 2022-04-27 | [12335](https://github.com/airbytehq/airbyte/pull/12335) | Adding fixtures to mock time.sleep for connectors that explicitly sleep | +| `0.2.6` | 2022-04-19 | [12122](https://github.com/airbytehq/airbyte/pull/12122) | Fixed the bug when only 100,000 Users are synced [11895](https://github.com/airbytehq/airbyte/issues/11895) and fixed bug when `start_date` is not used on user stream [12059](https://github.com/airbytehq/airbyte/issues/12059). 
-| `0.2.5` | 2022-04-05 | [11727](https://github.com/airbytehq/airbyte/pull/11727) | Fixed the bug when state was not parsed correctly -| `0.2.4` | 2022-04-04 | [11688](https://github.com/airbytehq/airbyte/pull/11688) | Small documentation corrections -| `0.2.3` | 2022-03-23 | [11349](https://github.com/airbytehq/airbyte/pull/11349) | Fixed the bug when Tickets stream didn't return deleted records -| `0.2.2` | 2022-03-17 | [11237](https://github.com/airbytehq/airbyte/pull/11237) | Fixed the bug when TicketComments stream didn't return all records -| `0.2.1` | 2022-03-15 | [11162](https://github.com/airbytehq/airbyte/pull/11162) | Added support of OAuth2.0 authentication method -| `0.2.0` | 2022-03-01 | [9456](https://github.com/airbytehq/airbyte/pull/9456) | Update source to use future requests | -| `0.1.12` | 2022-01-25 | [9785](https://github.com/airbytehq/airbyte/pull/9785) | Add additional log messages | -| `0.1.11` | 2021-12-21 | [8987](https://github.com/airbytehq/airbyte/pull/8987) | Update connector fields title/description | -| `0.1.9` | 2021-12-16 | [8616](https://github.com/airbytehq/airbyte/pull/8616) | Adds Brands, CustomRoles and Schedules streams | -| `0.1.8` | 2021-11-23 | [8050](https://github.com/airbytehq/airbyte/pull/8168) | Adds TicketMetricEvents stream | -| `0.1.7` | 2021-11-23 | [8058](https://github.com/airbytehq/airbyte/pull/8058) | Added support of AccessToken authentication | -| `0.1.6` | 2021-11-18 | [8050](https://github.com/airbytehq/airbyte/pull/8050) | Fix wrong types for schemas, add TypeTransformer | -| `0.1.5` | 2021-10-26 | [7679](https://github.com/airbytehq/airbyte/pull/7679) | Add ticket_id and ticket_comments | -| `0.1.4` | 2021-10-26 | [7377](https://github.com/airbytehq/airbyte/pull/7377) | Fix initially_assigned_at type in ticket metrics | -| `0.1.3` | 2021-10-17 | [7097](https://github.com/airbytehq/airbyte/pull/7097) | Corrected the connector's specification | -| `0.1.2` | 2021-10-16 | [6513](https://github.com/airbytehq/airbyte/pull/6513) | Fixed TicketComments stream | -| `0.1.1` | 2021-09-02 | [5787](https://github.com/airbytehq/airbyte/pull/5787) | Fixed incremental logic for the ticket_comments stream | -| `0.1.0` | 2021-07-21 | [4861](https://github.com/airbytehq/airbyte/pull/4861) | Created CDK native zendesk connector | +| `0.2.5` | 2022-04-05 | [11727](https://github.com/airbytehq/airbyte/pull/11727) | Fixed the bug when state was not parsed correctly | +| `0.2.4` | 2022-04-04 | [11688](https://github.com/airbytehq/airbyte/pull/11688) | Small documentation corrections | +| `0.2.3` | 2022-03-23 | [11349](https://github.com/airbytehq/airbyte/pull/11349) | Fixed the bug when Tickets stream didn't return deleted records | +| `0.2.2` | 2022-03-17 | [11237](https://github.com/airbytehq/airbyte/pull/11237) | Fixed the bug when TicketComments stream didn't return all records | +| `0.2.1` | 2022-03-15 | [11162](https://github.com/airbytehq/airbyte/pull/11162) | Added support of OAuth2.0 authentication method | +| `0.2.0` | 2022-03-01 | [9456](https://github.com/airbytehq/airbyte/pull/9456) | Update source to use future requests | +| `0.1.12` | 2022-01-25 | [9785](https://github.com/airbytehq/airbyte/pull/9785) | Add additional log messages | +| `0.1.11` | 2021-12-21 | [8987](https://github.com/airbytehq/airbyte/pull/8987) | Update connector fields title/description | +| `0.1.9` | 2021-12-16 | [8616](https://github.com/airbytehq/airbyte/pull/8616) | Adds Brands, CustomRoles and Schedules streams | +| `0.1.8` | 2021-11-23 | 
[8050](https://github.com/airbytehq/airbyte/pull/8168) | Adds TicketMetricEvents stream |
+| `0.1.7` | 2021-11-23 | [8058](https://github.com/airbytehq/airbyte/pull/8058) | Added support of AccessToken authentication |
+| `0.1.6` | 2021-11-18 | [8050](https://github.com/airbytehq/airbyte/pull/8050) | Fix wrong types for schemas, add TypeTransformer |
+| `0.1.5` | 2021-10-26 | [7679](https://github.com/airbytehq/airbyte/pull/7679) | Add ticket_id and ticket_comments |
+| `0.1.4` | 2021-10-26 | [7377](https://github.com/airbytehq/airbyte/pull/7377) | Fix initially_assigned_at type in ticket metrics |
+| `0.1.3` | 2021-10-17 | [7097](https://github.com/airbytehq/airbyte/pull/7097) | Corrected the connector's specification |
+| `0.1.2` | 2021-10-16 | [6513](https://github.com/airbytehq/airbyte/pull/6513) | Fixed TicketComments stream |
+| `0.1.1` | 2021-09-02 | [5787](https://github.com/airbytehq/airbyte/pull/5787) | Fixed incremental logic for the ticket_comments stream |
+| `0.1.0` | 2021-07-21 | [4861](https://github.com/airbytehq/airbyte/pull/4861) | Created CDK native zendesk connector |

From e8813ee60c953d35c6d6fc80f01ba53b4bb54bee Mon Sep 17 00:00:00 2001
From: Benoit Moriceau
Date: Thu, 28 Apr 2022 13:58:59 -0700
Subject: [PATCH 025/152] Restore jsonPath and fix it (#12325)

This restores the JSON traversal library. A bug was introduced in the JSON
path library, and this PR fixes it. In a JSON schema, an enum can be defined
without specifying a "type" attribute. That case wasn't handled in the
previous implementation. We now return the right type from the getType
method and process the node the same way as an integer/boolean/string type.
---
 .../io/airbyte/commons/json/JsonPaths.java | 22 +-
 .../io/airbyte/commons/json/JsonSchemas.java | 185 +++++++++-
 .../airbyte/commons/json/JsonSchemasTest.java | 121 +++++++
 .../io/airbyte/commons/json/JsonsTest.java | 18 -
 .../json_schemas/composite_json_schema.json | 39 +++
 .../json_schemas/json_with_all_types.json | 34 ++
 .../json_with_array_type_fields.json | 18 +
 ...ith_array_type_fields_with_composites.json | 32 ++
 .../src/main/resources/seed/source_specs.yaml | 4 +
 .../airbyte/config/init/SpecFormatTest.java | 59 ++++
 .../split_secrets/JsonSecretsProcessor.java | 128 ++-----
 .../split_secrets/SecretsHelpers.java | 202 ++++-------
 .../JsonSecretsProcessorTest.java | 329 ++++++++++--------
 .../split_secrets/SecretsHelpersTest.java | 18 +-
 .../split_secrets/SecretsTestCase.java | 15 +
 .../test_cases/ArrayTestCase.java | 12 +-
 .../test_cases/NestedObjectTestCase.java | 20 +-
 .../src/test/resources/array/expectedPaths | 1 +
 .../test/resources/array/partial_config.json | 12 +-
 .../array/updated_partial_config.json | 12 +-
 .../src/test/resources/array2/expected.json | 3 +
 .../test/resources/array2/full_config.json | 3 +
 .../test/resources/array2/partial_config.json | 7 +
 .../src/test/resources/array2/spec.json | 20 ++
 .../test/resources/array2/update_config.json | 3 +
 .../array2/updated_partial_config.json | 7 +
 .../resources/array_of_oneof/expectedPaths | 1 +
 .../src/test/resources/enum/expected.json | 7 +
 .../src/test/resources/enum/full_config.json | 9 +
 .../src/test/resources/enum/spec.json | 296 ++++++++++++++++
 .../resources/nested_object/expectedPaths | 1 +
 .../nested_object/partial_config.json | 4 +-
 .../nested_object/updated_partial_config.json | 4 +-
 .../updated_partial_config_update1.json | 4 +-
 .../updated_partial_config_update2.json | 4 +-
 .../test/resources/nested_oneof/expectedPaths | 1 +
 .../src/test/resources/oneof/expectedPaths | 1 +
.../resources/optional_password/expectedPaths | 1 + .../resources/postgres_ssh_key/expectedPaths | 1 + .../src/test/resources/simple/expectedPaths | 1 + .../source_freshdesk/spec.json | 1 + .../source-slack/source_slack/spec.json | 4 + .../scheduling/ConnectionUpdaterInput.java | 1 + 43 files changed, 1232 insertions(+), 433 deletions(-) create mode 100644 airbyte-commons/src/test/java/io/airbyte/commons/json/JsonSchemasTest.java create mode 100644 airbyte-commons/src/test/resources/json_schemas/composite_json_schema.json create mode 100644 airbyte-commons/src/test/resources/json_schemas/json_with_all_types.json create mode 100644 airbyte-commons/src/test/resources/json_schemas/json_with_array_type_fields.json create mode 100644 airbyte-commons/src/test/resources/json_schemas/json_with_array_type_fields_with_composites.json create mode 100644 airbyte-config/init/src/test/java/io/airbyte/config/init/SpecFormatTest.java create mode 100644 airbyte-config/persistence/src/test/resources/array/expectedPaths create mode 100644 airbyte-config/persistence/src/test/resources/array2/expected.json create mode 100644 airbyte-config/persistence/src/test/resources/array2/full_config.json create mode 100644 airbyte-config/persistence/src/test/resources/array2/partial_config.json create mode 100644 airbyte-config/persistence/src/test/resources/array2/spec.json create mode 100644 airbyte-config/persistence/src/test/resources/array2/update_config.json create mode 100644 airbyte-config/persistence/src/test/resources/array2/updated_partial_config.json create mode 100644 airbyte-config/persistence/src/test/resources/array_of_oneof/expectedPaths create mode 100644 airbyte-config/persistence/src/test/resources/enum/expected.json create mode 100644 airbyte-config/persistence/src/test/resources/enum/full_config.json create mode 100644 airbyte-config/persistence/src/test/resources/enum/spec.json create mode 100644 airbyte-config/persistence/src/test/resources/nested_object/expectedPaths create mode 100644 airbyte-config/persistence/src/test/resources/nested_oneof/expectedPaths create mode 100644 airbyte-config/persistence/src/test/resources/oneof/expectedPaths create mode 100644 airbyte-config/persistence/src/test/resources/optional_password/expectedPaths create mode 100644 airbyte-config/persistence/src/test/resources/postgres_ssh_key/expectedPaths create mode 100644 airbyte-config/persistence/src/test/resources/simple/expectedPaths diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonPaths.java b/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonPaths.java index 428299aa958b4b..0b75b981500dd4 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonPaths.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonPaths.java @@ -42,15 +42,15 @@ * returning a list for query results. In addition, we provide helper functions that will just * return a single value (see: {@link JsonPaths#getSingleValue(JsonNode, String)}). These should * only be used if it is not possible for a query to return more than one value. - * - * Note: Package private as most uses of JsonPaths seems like they can be hidden inside other - * commons libraries (i.e. Jsons and JsonsSchemas). If this assumption proves incorrect, we can open - * it up. 
*/ -class JsonPaths { +public class JsonPaths { private static final Logger LOGGER = LoggerFactory.getLogger(JsonPaths.class); + static final String JSON_PATH_START_CHARACTER = "$"; + static final String JSON_PATH_LIST_SPLAT = "[*]"; + static final String JSON_PATH_FIELD_SEPARATOR = "."; + // set default configurations at start up to match our JSON setup. static { Configuration.setDefaults(new Configuration.Defaults() { @@ -82,6 +82,18 @@ public Set

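The enum rule described in the PATCH 025 commit message is easy to state in code. Here is a minimal, illustrative sketch (in Python rather than the Java of `JsonSchemas.getType`, and with invented names) of inferring a schema node's type when only an `enum` is present:

```python
# Hedged sketch of the type-resolution rule from PATCH 025: a JSON schema
# node may declare "enum" without "type"; infer the type from the enum
# values instead of failing.
def get_type(schema_node: dict) -> str:
    if "type" in schema_node:
        return schema_node["type"]
    if "enum" in schema_node:
        first = schema_node["enum"][0]
        if isinstance(first, bool):  # check bool before int: bool is an int subtype
            return "boolean"
        if isinstance(first, int):
            return "integer"
        return "string"
    raise ValueError(f"cannot determine type of schema node: {schema_node}")

# e.g. a location enum such as {"enum": ["US", "EU"]} resolves to a string type
assert get_type({"enum": ["US", "EU"]}) == "string"
assert get_type({"type": "boolean"}) == "boolean"
```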
@@ -32,13 +32,19 @@ const ResetPasswordPage: React.FC = () => { email: "", }} validationSchema={ResetPasswordPageValidationSchema} - onSubmit={async ({ email }) => { - await requirePasswordReset(email); - registerNotification({ - id: "resetPassword.emailSent", - title: formatMessage({ id: "login.resetPassword.emailSent" }), - isError: false, - }); + onSubmit={async ({ email }, FormikBag) => { + try { + await requirePasswordReset(email); + registerNotification({ + id: "resetPassword.emailSent", + title: formatMessage({ id: "login.resetPassword.emailSent" }), + isError: false, + }); + } catch (err) { + err.message.includes("user-not-found") + ? FormikBag.setFieldError("email", "login.yourEmail.notFound") + : FormikBag.setFieldError("email", "login.unknownError"); + } }} validateOnBlur={true} validateOnChange={false} From fd7f21e07170fbc9cb6a22d03b642486d0da0b52 Mon Sep 17 00:00:00 2001 From: oneshcheret <33333155+sashaNeshcheret@users.noreply.github.com> Date: Fri, 29 Apr 2022 18:17:13 +0300 Subject: [PATCH 037/152] S3 destination: updating docs regarding certification (#11966) * S3 destination: updating docs regarding certification * S3 destination: updating docs by new template * Apply suggestions from code review Co-authored-by: Andy * S3 destination: updating docs by new template Co-authored-by: Andy --- docs/integrations/destinations/s3.md | 167 +++++++++++++++++---------- 1 file changed, 107 insertions(+), 60 deletions(-) diff --git a/docs/integrations/destinations/s3.md b/docs/integrations/destinations/s3.md index facd3f56c66b56..1e4552aac1dfb4 100644 --- a/docs/integrations/destinations/s3.md +++ b/docs/integrations/destinations/s3.md @@ -1,36 +1,99 @@ # S3 -## Features +This page guides you through the process of setting up the S3 destination connector. -| Feature | Support | Notes | -| :--- | :---: | :--- | -| Full Refresh Sync | ✅ | Warning: this mode deletes all previously synced data in the configured bucket path. | -| Incremental - Append Sync | ✅ | | -| Incremental - Deduped History | ❌ | As this connector does not support dbt, we don't support this sync mode on this destination. | -| Namespaces | ❌ | Setting a specific bucket path is equivalent to having separate namespaces. | +## Prerequisites -The Airbyte S3 destination allows you to sync data to AWS S3 or Minio S3. Each stream is written to its own directory under the bucket. - -## Troubleshooting - -Check out common troubleshooting issues for the S3 destination connector on our Discourse [here](https://discuss.airbyte.io/tags/c/connector/11/destination-s3). +List of required fields: +* **Access Key ID** +* **Secret Access Key** +* **S3 Bucket Name** +* **S3 Bucket Path** +* **S3 Bucket Region** -## Configuration +1. Allow connections from Airbyte server to your AWS S3/ Minio S3 cluster \(if they exist in separate VPCs\). +2. An S3 bucket with credentials or an instanceprofile with read/write permissions configured for the host (ec2, eks). -| Parameter | Type | Notes | -| :--- | :---: | :--- | -| S3 Endpoint | string | URL to S3, If using AWS S3 just leave blank. | -| S3 Bucket Name | string | Name of the bucket to sync data into. | -| S3 Bucket Path | string | Subdirectory under the above bucket to sync the data into. | -| S3 Bucket Format | string | Additional string format on how to store data under S3 Bucket Path. Default value is `${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_`. 
|
-| S3 Region | string | See [here](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-regions-availability-zones.html#concepts-available-regions) for all region codes. |
-| Access Key ID | string | AWS/Minio credential. |
-| Secret Access Key | string | AWS/Minio credential. |
-| Format | object | Format specific configuration. See the [spec](/airbyte-integrations/connectors/destination-s3/src/main/resources/spec.json) for details. |
+## Step 1: Set up S3
+
+[Sign in](https://signin.aws.amazon.com/signin) to your AWS account.
+Use an existing or create a new [Access Key ID and Secret Access Key](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#:~:text=IAM%20User%20Guide.-,Programmatic%20access,-You%20must%20provide).
+
+Prepare the S3 bucket that will be used as the destination; see [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) to create an S3 bucket.
+
+## Step 2: Set up the S3 destination connector in Airbyte
+
+**For Airbyte Cloud:**
+
+1. [Log into your Airbyte Cloud](https://cloud.airbyte.io/workspaces) account.
+2. In the left navigation bar, click **Destinations**. In the top-right corner, click **+ new destination**.
+3. On the destination setup page, select **S3** from the Destination type dropdown and enter a name for this connector.
+4. Configure fields:
+    * **Access Key Id**
+        * See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) on how to generate an access key.
+        * We recommend creating an Airbyte-specific user. This user will require [read and write permissions](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_examples_s3_rw-bucket.html) to objects in the bucket.
+    * **Secret Access Key**
+        * Corresponding key to the above key id.
+    * **S3 Bucket Name**
+        * See [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) to create an S3 bucket.
+    * **S3 Bucket Path**
+        * Subdirectory under the above bucket to sync the data into.
+    * **S3 Bucket Region**
+        * See [here](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-regions-availability-zones.html#concepts-available-regions) for all region codes.
+    * **S3 Path Format**
+        * Additional string format on how to store data under S3 Bucket Path. Default value is `${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_`.
+    * **S3 Endpoint**
+        * Leave empty if using AWS S3, fill in S3 URL if using Minio S3.
+5. Click `Set up destination`.
+
+**For Airbyte OSS:**
+
+1. Go to your local Airbyte page.
+2. In the left navigation bar, click **Destinations**. In the top-right corner, click **+ new destination**.
+3. On the destination setup page, select **S3** from the Destination type dropdown and enter a name for this connector.
+4. Configure fields:
+    * **Access Key Id**
+        * See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) on how to generate an access key.
+        * See [this](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2_instance-profiles.html) on how to create an instanceprofile.
+        * We recommend creating an Airbyte-specific user. This user will require [read and write permissions](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_examples_s3_rw-bucket.html) to objects in the staging bucket.
+        * If the Access Key and Secret Access Key are not provided, the authentication will rely on the instanceprofile.
+    * **Secret Access Key**
+        * Corresponding key to the above key id.
+    * Make sure your S3 bucket is accessible from the machine running Airbyte.
+        * This depends on your networking setup.
+        * You can check AWS S3 documentation with a tutorial on how to properly configure your S3's access [here](https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-control-overview.html).
+        * If you use instance profile authentication, make sure the role has permission to read/write on the bucket.
+        * The easiest way to verify if Airbyte is able to connect to your S3 bucket is via the check connection tool in the UI.
+    * **S3 Bucket Name**
+        * See [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) to create an S3 bucket.
+    * **S3 Bucket Path**
+        * Subdirectory under the above bucket to sync the data into.
+    * **S3 Bucket Region**
+        * See [here](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-regions-availability-zones.html#concepts-available-regions) for all region codes.
+    * **S3 Path Format**
+        * Additional string format on how to store data under S3 Bucket Path. Default value is `${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_`.
+    * **S3 Endpoint**
+        * Leave empty if using AWS S3, fill in S3 URL if using Minio S3.
+5. Click `Set up destination`.

-⚠️ Please note that under "Full Refresh Sync" mode, data in the configured bucket and path will be wiped out before each sync. We recommend you to provision a dedicated S3 resource for this sync to prevent unexpected data deletion from misconfiguration. ⚠️
+In order for everything to work correctly, it is also necessary that the user whose "S3 Key Id" and "S3 Access Key" are used have access to both the bucket and its contents. Policies to use:
+```json
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Effect": "Allow",
+      "Action": "s3:*",
+      "Resource": [
+        "arn:aws:s3:::YOUR_BUCKET_NAME/*",
+        "arn:aws:s3:::YOUR_BUCKET_NAME"
+      ]
+    }
+  ]
+}
+```

-The full path of the output data with the default S3 path format `${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_` is:
+The full path of the output data with the default S3 Path Format `${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_` is:

```text
<bucket_name>/<namespace>/<stream_name>/<date>_<epoch>_<part_id>.<format_extension>
@@ -70,14 +133,26 @@ But it is possible to further customize by using the available variables to form
- `${EPOCH}`: Milliseconds since Epoch in which the sync was writing the output data in.
- `${UUID}`: random uuid string

Note:
-- Multiple `/` characters in the S3 path are collapsed into a single `/` character.
+- Multiple `/` characters in the S3 path are collapsed into a single `/` character.
- If the output bucket contains too many files, the part id variable is using a `UUID` instead. It uses sequential ID otherwise.

Please note that the stream name may contain a prefix, if it is configured on the connection.
-A data sync may create multiple files as the output files can be partitioned by size (targeting a size of 200MB compressed or lower) .
+A data sync may create multiple files as the output files can be partitioned by size (targeting a size of 200MB compressed or lower).

-## Output Schema
+## Supported sync modes
+
+| Feature | Support | Notes |
+| :--- | :---: | :--- |
+| Full Refresh Sync | ✅ | Warning: this mode deletes all previously synced data in the configured bucket path. |
+| Incremental - Append Sync | ✅ | |
+| Incremental - Deduped History | ❌ | As this connector does not support dbt, we don't support this sync mode on this destination.
| +| Namespaces | ❌ | Setting a specific bucket path is equivalent to having separate namespaces. | + +The Airbyte S3 destination allows you to sync data to AWS S3 or Minio S3. Each stream is written to its own directory under the bucket. +⚠️ Please note that under "Full Refresh Sync" mode, data in the configured bucket and path will be wiped out before each sync. We recommend you to provision a dedicated S3 resource for this sync to prevent unexpected data deletion from misconfiguration. ⚠️ + +## Supported Output schema Each stream will be outputted to its dedicated directory according to the configuration. The complete datastore of each stream includes all the output files under that directory. You can think of the directory as equivalent of a Table in the database world. @@ -117,7 +192,7 @@ Here is the available compression codecs: #### Data schema -Under the hood, an Airbyte data stream in Json schema is first converted to an Avro schema, then the Json object is converted to an Avro record. Because the data stream can come from any data source, the Json to Avro conversion process has arbitrary rules and limitations. Learn more about how source data is converted to Avro and the current limitations [here](https://docs.airbyte.io/understanding-airbyte/json-avro-conversion). +Under the hood, an Airbyte data stream in JSON schema is first converted to an Avro schema, then the JSON object is converted to an Avro record. Because the data stream can come from any data source, the JSON to Avro conversion process has arbitrary rules and limitations. Learn more about how source data is converted to Avro and the current limitations [here](https://docs.airbyte.io/understanding-airbyte/json-avro-conversion). ### CSV @@ -158,7 +233,7 @@ Output files can be compressed. The default option is GZIP compression. If compr ### JSON Lines \(JSONL\) -[Json Lines](https://jsonlines.org/) is a text format with one JSON per line. Each line has a structure as follows: +[JSON Lines](https://jsonlines.org/) is a text format with one JSON per line. Each line has a structure as follows: ```json { @@ -217,35 +292,7 @@ These parameters are related to the `ParquetOutputFormat`. See the [Java doc](ht #### Data schema -Under the hood, an Airbyte data stream in Json schema is first converted to an Avro schema, then the Json object is converted to an Avro record, and finally the Avro record is outputted to the Parquet format. Because the data stream can come from any data source, the Json to Avro conversion process has arbitrary rules and limitations. Learn more about how source data is converted to Avro and the current limitations [here](https://docs.airbyte.io/understanding-airbyte/json-avro-conversion). - -## Getting Started \(Airbyte Open-Source / Airbyte Cloud\) - -#### Requirements - -1. Allow connections from Airbyte server to your AWS S3/ Minio S3 cluster \(if they exist in separate VPCs\). -2. An S3 bucket with credentials or an instanceprofile with read/write permissions configured for the host (ec2, eks). - -#### Setup Guide - -* Fill up S3 info - * **S3 Endpoint** - * Leave empty if using AWS S3, fill in S3 URL if using Minio S3. - * **S3 Bucket Name** - * See [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) to create an S3 bucket. - * **S3 Bucket Region** - * **Access Key Id** - * See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) on how to generate an access key. 
- * See [this](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2_instance-profiles.html) on how to create a instanceprofile. - * We recommend creating an Airbyte-specific user. This user will require [read and write permissions](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_examples_s3_rw-bucket.html) to objects in the staging bucket. - * If the Access Key and Secret Access Key are not provided, the authentication will rely on the instanceprofile. - * **Secret Access Key** - * Corresponding key to the above key id. -* Make sure your S3 bucket is accessible from the machine running Airbyte. - * This depends on your networking setup. - * You can check AWS S3 documentation with a tutorial on how to properly configure your S3's access [here](https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-control-overview.html). - * If you will use instance profile authentication, make sure the role has permission to read/write on the bucket. - * The easiest way to verify if Airbyte is able to connect to your S3 bucket is via the check connection tool in the UI. +Under the hood, an Airbyte data stream in JSON schema is first converted to an Avro schema, then the JSON object is converted to an Avro record, and finally the Avro record is outputted to the Parquet format. Because the data stream can come from any data source, the JSON to Avro conversion process has arbitrary rules and limitations. Learn more about how source data is converted to Avro and the current limitations [here](https://docs.airbyte.io/understanding-airbyte/json-avro-conversion). In order for everything to work correctly, it is also necessary that the user whose "S3 Key Id" and "S3 Access Key" are used have access to both the bucket and its contents. 
Policies to use: ```json From ceec454e30ad531dcc306a847d46b0fe4462e208 Mon Sep 17 00:00:00 2001 From: Brian Lai <51336873+brianjlai@users.noreply.github.com> Date: Fri, 29 Apr 2022 13:44:04 -0700 Subject: [PATCH 038/152] update python cdk tutorial with updates to exchange rates api (#12427) * update python cdk tutorial with updates to exchange rates api * remove extra comment in source.py tutorial sample code --- .../configured_catalog.json | 3 ++ .../exchange_rates.json | 3 ++ .../sample_files/config.json | 2 +- .../sample_files/configured_catalog.json | 3 ++ .../sample_files/invalid_config.json | 2 +- .../schemas/exchange_rates.json | 3 ++ .../source_python_http_tutorial/source.py | 16 ++++--- .../source_python_http_tutorial/spec.json | 5 ++ .../0-getting-started.md | 4 ++ .../3-define-inputs.md | 5 ++ .../4-connection-checking.md | 19 ++++---- .../5-declare-schema.md | 4 +- .../cdk-tutorial-python-http/6-read-data.md | 46 ++++++++++--------- 13 files changed, 72 insertions(+), 43 deletions(-) diff --git a/airbyte-cdk/python/docs/tutorials/http_api_source_assets/configured_catalog.json b/airbyte-cdk/python/docs/tutorials/http_api_source_assets/configured_catalog.json index 66ab9be9e7bb52..7aa9a7e9b2229c 100644 --- a/airbyte-cdk/python/docs/tutorials/http_api_source_assets/configured_catalog.json +++ b/airbyte-cdk/python/docs/tutorials/http_api_source_assets/configured_catalog.json @@ -7,6 +7,9 @@ "$schema": "http://json-schema.org/draft-04/schema#", "type": "object", "properties": { + "access_key": { + "type": "string" + }, "base": { "type": "string" }, diff --git a/airbyte-cdk/python/docs/tutorials/http_api_source_assets/exchange_rates.json b/airbyte-cdk/python/docs/tutorials/http_api_source_assets/exchange_rates.json index 7476b088094e2d..9462ce0079e6ef 100644 --- a/airbyte-cdk/python/docs/tutorials/http_api_source_assets/exchange_rates.json +++ b/airbyte-cdk/python/docs/tutorials/http_api_source_assets/exchange_rates.json @@ -2,6 +2,9 @@ "type": "object", "required": ["base", "date", "rates"], "properties": { + "access_key": { + "type": "string" + }, "base": { "type": "string" }, diff --git a/airbyte-integrations/connectors/source-python-http-tutorial/sample_files/config.json b/airbyte-integrations/connectors/source-python-http-tutorial/sample_files/config.json index 2e4bbdfb3b61dd..4a9d0b46c3bf09 100644 --- a/airbyte-integrations/connectors/source-python-http-tutorial/sample_files/config.json +++ b/airbyte-integrations/connectors/source-python-http-tutorial/sample_files/config.json @@ -1 +1 @@ -{ "start_date": "2021-04-01", "base": "USD" } +{ "start_date": "2021-04-01", "base": "USD", "access_key": "abcdef" } diff --git a/airbyte-integrations/connectors/source-python-http-tutorial/sample_files/configured_catalog.json b/airbyte-integrations/connectors/source-python-http-tutorial/sample_files/configured_catalog.json index 8c34f50528be39..c42547264dd573 100644 --- a/airbyte-integrations/connectors/source-python-http-tutorial/sample_files/configured_catalog.json +++ b/airbyte-integrations/connectors/source-python-http-tutorial/sample_files/configured_catalog.json @@ -7,6 +7,9 @@ "$schema": "http://json-schema.org/draft-04/schema#", "type": "object", "properties": { + "access_key": { + "type": "string" + }, "base": { "type": "string" }, diff --git a/airbyte-integrations/connectors/source-python-http-tutorial/sample_files/invalid_config.json b/airbyte-integrations/connectors/source-python-http-tutorial/sample_files/invalid_config.json index 779b9ee5d1e616..9daf8e2f3fe7ec 100644 --- 
a/airbyte-integrations/connectors/source-python-http-tutorial/sample_files/invalid_config.json +++ b/airbyte-integrations/connectors/source-python-http-tutorial/sample_files/invalid_config.json @@ -1 +1 @@ -{ "start_date": "2021-04-01", "base": "BTC" } +{ "start_date": "2021-04-01", "base": "BTC", "access_key": "abcdef" } diff --git a/airbyte-integrations/connectors/source-python-http-tutorial/source_python_http_tutorial/schemas/exchange_rates.json b/airbyte-integrations/connectors/source-python-http-tutorial/source_python_http_tutorial/schemas/exchange_rates.json index 80b47d0eeeee44..84b6325ce5d24b 100644 --- a/airbyte-integrations/connectors/source-python-http-tutorial/source_python_http_tutorial/schemas/exchange_rates.json +++ b/airbyte-integrations/connectors/source-python-http-tutorial/source_python_http_tutorial/schemas/exchange_rates.json @@ -2,6 +2,9 @@ "$schema": "http://json-schema.org/draft-04/schema#", "type": "object", "properties": { + "access_key": { + "type": "string" + }, "base": { "type": "string" }, diff --git a/airbyte-integrations/connectors/source-python-http-tutorial/source_python_http_tutorial/source.py b/airbyte-integrations/connectors/source-python-http-tutorial/source_python_http_tutorial/source.py index 2f8c75c5e74c98..76b6af1f6eced6 100644 --- a/airbyte-integrations/connectors/source-python-http-tutorial/source_python_http_tutorial/source.py +++ b/airbyte-integrations/connectors/source-python-http-tutorial/source_python_http_tutorial/source.py @@ -14,14 +14,16 @@ class ExchangeRates(HttpStream): - url_base = "https://api.exchangeratesapi.io/" + url_base = "http://api.exchangeratesapi.io/" cursor_field = "date" primary_key = "date" - def __init__(self, base: str, start_date: datetime, **kwargs): - super().__init__(**kwargs) - self.base = base + def __init__(self, config: Mapping[str, Any], start_date: datetime, **kwargs): + super().__init__() + self.base = config["base"] + self.access_key = config["access_key"] self.start_date = start_date + self._cursor_value = None def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: # The API does not offer pagination, so we return None to indicate there are no more pages in the response @@ -38,8 +40,8 @@ def request_params( stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, ) -> MutableMapping[str, Any]: - # The api requires that we include the base currency as a query param so we do that in this method - return {"base": self.base} + # The api requires that we include access_key as a query param so we do that in this method + return {"access_key": self.access_key} def parse_response( self, @@ -104,4 +106,4 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: auth = NoAuth() # Parse the date from a string into a datetime object start_date = datetime.strptime(config["start_date"], "%Y-%m-%d") - return [ExchangeRates(authenticator=auth, base=config["base"], start_date=start_date)] + return [ExchangeRates(authenticator=auth, config=config, start_date=start_date)] diff --git a/airbyte-integrations/connectors/source-python-http-tutorial/source_python_http_tutorial/spec.json b/airbyte-integrations/connectors/source-python-http-tutorial/source_python_http_tutorial/spec.json index c62b4b93f01428..94f00e9b0e1682 100644 --- a/airbyte-integrations/connectors/source-python-http-tutorial/source_python_http_tutorial/spec.json +++ b/airbyte-integrations/connectors/source-python-http-tutorial/source_python_http_tutorial/spec.json @@ -7,6 +7,11 @@ "required": 
["start_date", "base"], "additionalProperties": false, "properties": { + "access_key": { + "title": "Access Key", + "type": "string", + "description": "API access key used to retrieve data from the Exchange Rates API." + }, "start_date": { "title": "Start Date", "type": "string", diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/0-getting-started.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/0-getting-started.md index a1bc7b2227253a..6a813f32895a79 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/0-getting-started.md +++ b/docs/connector-development/tutorials/cdk-tutorial-python-http/0-getting-started.md @@ -12,6 +12,10 @@ This is a step-by-step guide for how to create an Airbyte source in Python to re All the commands below assume that `python` points to a version of python >=3.9.0. On some systems, `python` points to a Python2 installation and `python3` points to Python3. If this is the case on your machine, substitute all `python` commands in this guide with `python3`. +## Exchange Rates API Setup + +For this guide we will be making API calls to the Exchange Rates API. In order to generate the API access key that will be used by the new connector, you will have to follow steps on the [Exchange Rates API](https://exchangeratesapi.io/) by signing up for the Free tier plan. Once you have an API access key, you can continue with the guide. + ## Checklist * Step 1: Create the source using the template diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/3-define-inputs.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/3-define-inputs.md index e4df89230e2016..7c30fb505fd954 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/3-define-inputs.md +++ b/docs/connector-development/tutorials/cdk-tutorial-python-http/3-define-inputs.md @@ -17,10 +17,14 @@ connectionSpecification: title: Python Http Tutorial Spec type: object required: + - access_key - start_date - base additionalProperties: false properties: + access_key: + type: string + description: API access key used to retrieve data from the Exchange Rates API. start_date: type: string description: Start getting data from that date. @@ -37,6 +41,7 @@ connectionSpecification: In addition to metadata, we define two inputs: +* `access_key`: The API access key used to authenticate requests to the API * `start_date`: The beginning date to start tracking currency exchange rates from * `base`: The currency whose rates we're interested in tracking diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/4-connection-checking.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/4-connection-checking.md index 367dcef39ce8fd..b07f7cc67ce204 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/4-connection-checking.md +++ b/docs/connector-development/tutorials/cdk-tutorial-python-http/4-connection-checking.md @@ -4,6 +4,7 @@ The second operation in the Airbyte Protocol that we'll implement is the `check` This operation verifies that the input configuration supplied by the user can be used to connect to the underlying data source. Note that this user-supplied configuration has the values described in the `spec.yaml` filled in. In other words if the `spec.yaml` said that the source requires a `username` and `password` the config object might be `{ "username": "airbyte", "password": "password123" }`. 
You should then implement something that returns a json object reporting, given the credentials in the config, whether we were able to connect to the source.
+In order to make requests to the API, we need to specify the access key we generated earlier.
In our case, this is a fairly trivial check since the API requires no credentials. Instead, let's verify that the user-input `base` currency is a legitimate currency. In `source.py` we'll find the following autogenerated source:

```python
@@ -37,24 +38,22 @@ Following the docstring instructions, we'll change the implementation to verify

     return True, None
```

-Let's test out this implementation by creating two objects: a valid and an invalid config and attempt to give them as input to the connector
+Let's test out this implementation by creating two objects: a valid and an invalid config and attempt to give them as input to the connector. For this section, you will need to take the API access key generated earlier and add it to both configs. Because these configs contain secrets, we recommend storing configs which contain secrets in `secrets/config.json` because the `secrets` directory is gitignored by default.

```text
-echo '{"start_date": "2021-04-01", "base": "USD"}' > sample_files/config.json
-echo '{"start_date": "2021-04-01", "base": "BTC"}' > sample_files/invalid_config.json
-python main.py check --config sample_files/config.json
-python main.py check --config sample_files/invalid_config.json
+mkdir sample_files
+echo '{"start_date": "2022-04-01", "base": "USD", "access_key": "<your_access_key>"}' > secrets/config.json
+echo '{"start_date": "2022-04-01", "base": "BTC", "access_key": "<your_access_key>"}' > secrets/invalid_config.json
+python main.py check --config secrets/config.json
+python main.py check --config secrets/invalid_config.json
```

You should see output like the following:

```text
-> python main.py check --config sample_files/config.json
+> python main.py check --config secrets/config.json
{"type": "CONNECTION_STATUS", "connectionStatus": {"status": "SUCCEEDED"}}

-> python main.py check --config sample_files/invalid_config.json
+> python main.py check --config secrets/invalid_config.json
{"type": "CONNECTION_STATUS", "connectionStatus": {"status": "FAILED", "message": "Input currency BTC is invalid. Please input one of the following currencies: {'DKK', 'USD', 'CZK', 'BGN', 'JPY'}"}}
```
-
-While developing, we recommend storing configs which contain secrets in `secrets/config.json` because the `secrets` directory is gitignored by default.
-
diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/5-declare-schema.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/5-declare-schema.md
index 3ddc5ab64ce266..59c3c663932208 100644
--- a/docs/connector-development/tutorials/cdk-tutorial-python-http/5-declare-schema.md
+++ b/docs/connector-development/tutorials/cdk-tutorial-python-http/5-declare-schema.md
@@ -10,7 +10,7 @@ We'll begin by creating a stream to represent the data that we're pulling from t

```python
class ExchangeRates(HttpStream):
-    url_base = "https://api.exchangeratesapi.io/"
+    url_base = "http://api.exchangeratesapi.io/"

    # Set this as a noop.
primary_key = None @@ -60,7 +60,7 @@ Having created this stream in code, we'll put a file `exchange_rates.json` in th With `.json` schema file in place, let's see if the connector can now find this schema and produce a valid catalog: ```text -python main.py discover --config sample_files/config.json +python main.py discover --config secrets/config.json ``` you should see some output like: diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/6-read-data.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/6-read-data.md index 2ff984deeebd49..755118c859d57a 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/6-read-data.md +++ b/docs/connector-development/tutorials/cdk-tutorial-python-http/6-read-data.md @@ -36,13 +36,14 @@ Let's begin by pulling data for the last day's rates by using the `/latest` endp ```python class ExchangeRates(HttpStream): - url_base = "https://api.exchangeratesapi.io/" + url_base = "http://api.exchangeratesapi.io/" primary_key = None - def __init__(self, base: str, **kwargs): + def __init__(self, config: Mapping[str, Any], **kwargs): super().__init__() - self.base = base + self.base = config['base'] + self.access_key = config['access_key'] def path( @@ -60,8 +61,8 @@ class ExchangeRates(HttpStream): stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, ) -> MutableMapping[str, Any]: - # The api requires that we include the base currency as a query param so we do that in this method - return {'base': self.base} + # The api requires that we include access_key as a query param so we do that in this method + return {'access_key': self.access_key} def parse_response( self, @@ -80,14 +81,14 @@ class ExchangeRates(HttpStream): return None ``` -This may look big, but that's just because there are lots of \(unused, for now\) parameters in these methods \(those can be hidden with Python's `**kwargs`, but don't worry about it for now\). Really we just added a few lines of "significant" code: 1. Added a constructor `__init__` which stores the `base` currency to query for. 2. `return {'base': self.base}` to add the `?base=` query parameter to the request based on the `base` input by the user. 3. `return [response.json()]` to parse the response from the API to match the schema of our schema `.json` file. 4. `return "latest"` to indicate that we want to hit the `/latest` endpoint of the API to get the latest exchange rate data. +This may look big, but that's just because there are lots of \(unused, for now\) parameters in these methods \(those can be hidden with Python's `**kwargs`, but don't worry about it for now\). Really we just added a few lines of "significant" code: 1. Added a constructor `__init__` which stores the `base` currency to query for and the `access_key` used for authentication. 2. `return {'access_key': self.access_key}` to add the `?access_key=` query parameter to the request based on the `access_key` input by the user. 3. `return [response.json()]` to parse the response from the API to match the schema of our schema `.json` file. 4. `return "latest"` to indicate that we want to hit the `/latest` endpoint of the API to get the latest exchange rate data. 
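To make the wiring above concrete, here is a small illustrative sketch, separate from the tutorial code, of the request that results once `url_base`, `path()` and `request_params()` are combined. The access key is the placeholder value from the sample configs:

```python
import requests

# Hedged sketch: how url_base, path() and request_params() combine into the
# final GET request. "abcdef" is the placeholder key used in the sample configs.
url_base = "http://api.exchangeratesapi.io/"
path = "latest"
params = {"access_key": "abcdef"}  # returned by request_params()

prepared = requests.Request("GET", url_base + path, params=params).prepare()
print(prepared.url)
# http://api.exchangeratesapi.io/latest?access_key=abcdef
```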
-Let's also pass the `base` parameter input by the user to the stream class: +Let's also pass the config specified by the user to the stream class: ```python def streams(self, config: Mapping[str, Any]) -> List[Stream]: - auth = NoAuth() - return [ExchangeRates(authenticator=auth, base=config['base'])] + auth = NoAuth() + return [ExchangeRates(authenticator=auth, config=config)] ``` We're now ready to query the API! @@ -95,13 +96,13 @@ We're now ready to query the API! To do this, we'll need a [ConfiguredCatalog](../../../understanding-airbyte/beginners-guide-to-catalog.md). We've prepared one [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-cdk/python/docs/tutorials/http_api_source_assets/configured_catalog.json) -- download this and place it in `sample_files/configured_catalog.json`. Then run: ```text - python main.py read --config sample_files/config.json --catalog sample_files/configured_catalog.json + python main.py read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` you should see some output lines, one of which is a record from the API: ```text -{"type": "RECORD", "record": {"stream": "exchange_rates", "data": {"base": "USD", "rates": {"GBP": 0.7196938353, "HKD": 7.7597848573, "IDR": 14482.4824162185, "ILS": 3.2412081092, "DKK": 6.1532478279, "INR": 74.7852709971, "CHF": 0.915763343, "MXN": 19.8439387671, "CZK": 21.3545717832, "SGD": 1.3261894911, "THB": 31.4398014067, "HRK": 6.2599917253, "EUR": 0.8274720728, "MYR": 4.0979726934, "NOK": 8.3043442284, "CNY": 6.4856433595, "BGN": 1.61836988, "PHP": 48.3516756309, "PLN": 3.770872983, "ZAR": 14.2690111709, "CAD": 1.2436905254, "ISK": 124.9482829954, "BRL": 5.4526272238, "RON": 4.0738932561, "NZD": 1.3841125362, "TRY": 8.3101365329, "JPY": 108.0182043856, "RUB": 74.9555647497, "KRW": 1111.7583781547, "USD": 1.0, "AUD": 1.2840711626, "HUF": 300.6206040546, "SEK": 8.3829540753}, "date": "2021-04-26"}, "emitted_at": 1619498062000}} +"type": "RECORD", "record": {"stream": "exchange_rates", "data": {"success": true, "timestamp": 1651129443, "base": "EUR", "date": "2022-04-28", "rates": {"AED": 3.86736, "AFN": 92.13195, "ALL": 120.627843, "AMD": 489.819318, "ANG": 1.910347, "AOA": 430.073735, "ARS": 121.119674, "AUD": 1.478877, "AWG": 1.895762, "AZN": 1.794932, "BAM": 1.953851, "BBD": 2.140212, "BDT": 91.662775, "BGN": 1.957013, "BHD": 0.396929, "BIF": 2176.669098, "BMD": 1.052909, "BND": 1.461004, "BOB": 7.298009, "BRL": 5.227798, "BSD": 1.060027, "BTC": 2.6717761e-05, "BTN": 81.165435, "BWP": 12.802036, "BYN": 3.565356, "BYR": 20637.011334, "BZD": 2.136616, "CAD": 1.349329, "CDF": 2118.452361, "CHF": 1.021627, "CLF": 0.032318, "CLP": 891.760584, "CNY": 6.953724, "COP": 4171.971894, "CRC": 701.446322, "CUC": 1.052909, "CUP": 27.902082, "CVE": 110.15345, "CZK": 24.499027, "DJF": 188.707108, "DKK": 7.441548, "DOP": 58.321493, "DZD": 152.371647, "EGP": 19.458297, "ERN": 15.793633, "ETB": 54.43729, "EUR": 1, "FJD": 2.274651, "FKP": 0.80931, "GBP": 0.839568, "GEL": 3.20611, "GGP": 0.80931, "GHS": 7.976422, "GIP": 0.80931, "GMD": 56.64554, "GNF": 9416.400803, "GTQ": 8.118402, "GYD": 221.765423, "HKD": 8.261854, "HNL": 26.0169, "HRK": 7.563467, "HTG": 115.545574, "HUF": 377.172734, "IDR": 15238.748216, "ILS": 3.489582, "IMP": 0.80931, "INR": 80.654494, "IQD": 1547.023976, "IRR": 44538.040218, "ISK": 137.457233, "JEP": 0.80931, "JMD": 163.910125, "JOD": 0.746498, "JPY": 137.331903, "KES": 121.87429, "KGS": 88.581418, "KHR": 4286.72178, "KMF": 486.443591, "KPW": 947.617993, "KRW": 1339.837191, "KWD": 
0.322886, "KYD": 0.883397, "KZT": 473.770223, "LAK": 12761.755235, "LBP": 1602.661797, "LKR": 376.293562, "LRD": 159.989586, "LSL": 15.604181, "LTL": 3.108965, "LVL": 0.636894, "LYD": 5.031557, "MAD": 10.541225, "MDL": 19.593772, "MGA": 4284.002369, "MKD": 61.553251, "MMK": 1962.574442, "MNT": 3153.317641, "MOP": 8.567461, "MRO": 375.88824, "MUR": 45.165684, "MVR": 16.199478, "MWK": 865.62318, "MXN": 21.530268, "MYR": 4.594366, "MZN": 67.206888, "NAD": 15.604214, "NGN": 437.399752, "NIO": 37.965356, "NOK": 9.824365, "NPR": 129.86672, "NZD": 1.616441, "OMR": 0.405421, "PAB": 1.060027, "PEN": 4.054233, "PGK": 3.73593, "PHP": 55.075028, "PKR": 196.760944, "PLN": 4.698101, "PYG": 7246.992296, "QAR": 3.833603, "RON": 4.948144, "RSD": 117.620172, "RUB": 77.806269, "RWF": 1086.709833, "SAR": 3.949063, "SBD": 8.474149, "SCR": 14.304711, "SDG": 470.649944, "SEK": 10.367719, "SGD": 1.459695, "SHP": 1.45028, "SLL": 13082.391386, "SOS": 609.634325, "SRD": 21.904702, "STD": 21793.085136, "SVC": 9.275519, "SYP": 2645.380032, "SZL": 16.827859, "THB": 36.297991, "TJS": 13.196811, "TMT": 3.685181, "TND": 3.22348, "TOP": 2.428117, "TRY": 15.575532, "TTD": 7.202107, "TWD": 31.082183, "TZS": 2446.960099, "UAH": 32.065033, "UGX": 3773.578577, "USD": 1.052909, "UYU": 43.156886, "UZS": 11895.19696, "VEF": 225143710305.04727, "VND": 24171.62598, "VUV": 118.538204, "WST": 2.722234, "XAF": 655.287181, "XAG": 0.045404, "XAU": 0.000559, "XCD": 2.845538, "XDR": 0.783307, "XOF": 655.293398, "XPF": 118.347299, "YER": 263.490114, "ZAR": 16.77336, "ZMK": 9477.445964, "ZMW": 18.046154, "ZWL": 339.036185}}, "emitted_at": 1651130169364}} ``` There we have it - a stream which reads data in just a few lines of code! @@ -127,10 +128,10 @@ Let's get the easy parts out of the way and pass the `start_date`: ```python def streams(self, config: Mapping[str, Any]) -> List[Stream]: - auth = NoAuth() - # Parse the date from a string into a datetime object - start_date = datetime.strptime(config['start_date'], '%Y-%m-%d') - return [ExchangeRates(authenticator=auth, base=config['base'], start_date=start_date)] + auth = NoAuth() + # Parse the date from a string into a datetime object + start_date = datetime.strptime(config['start_date'], '%Y-%m-%d') + return [ExchangeRates(authenticator=auth, config=config, start_date=start_date)] ``` Let's also add this parameter to the constructor and declare the `cursor_field`: @@ -141,18 +142,19 @@ from airbyte_cdk.sources.streams import IncrementalMixin class ExchangeRates(HttpStream, IncrementalMixin): - url_base = "https://api.exchangeratesapi.io/" + url_base = "http://api.exchangeratesapi.io/" cursor_field = "date" primary_key = "date" - def __init__(self, base: str, start_date: datetime, **kwargs): + def __init__(self, config: Mapping[str, Any], start_date: datetime, **kwargs): super().__init__() - self.base = base + self.base = config['base'] + self.access_key = config['access_key'] self.start_date = start_date self._cursor_value = None ``` -Declaring the `cursor_field` informs the framework that this stream now supports incremental sync. The next time you run `python main_dev.py discover --config sample_files/config.json` you'll find that the `supported_sync_modes` field now also contains `incremental`. +Declaring the `cursor_field` informs the framework that this stream now supports incremental sync. The next time you run `python main_dev.py discover --config secrets/config.json` you'll find that the `supported_sync_modes` field now also contains `incremental`. 
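As a preview of the state handling described next, here is a simplified sketch (not the verbatim tutorial code) of the property pair that exposes and restores the cursor as a `{'date': 'YYYY-MM-DD'}` mapping:

```python
from datetime import datetime
from typing import Any, Mapping

# Simplified sketch of the IncrementalMixin state pattern: the cursor is
# exposed and restored as a {"date": "YYYY-MM-DD"} mapping via a property.
class ExchangeRatesStateSketch:
    cursor_field = "date"

    def __init__(self, start_date: datetime):
        self.start_date = start_date
        self._cursor_value = None

    @property
    def state(self) -> Mapping[str, Any]:
        date = self._cursor_value or self.start_date
        return {self.cursor_field: date.strftime("%Y-%m-%d")}

    @state.setter
    def state(self, value: Mapping[str, Any]):
        # Restore the cursor from a previously emitted state message.
        self._cursor_value = datetime.strptime(value[self.cursor_field], "%Y-%m-%d")

s = ExchangeRatesStateSketch(datetime(2022, 4, 1))
assert s.state == {"date": "2022-04-01"}
s.state = {"date": "2022-04-26"}
assert s.state == {"date": "2022-04-26"}
```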
But we're not quite done with supporting incremental, we have to actually emit state! We'll structure our state object very simply: it will be a `dict` whose single key is `'date'` and value is the date of the last day we synced data from. For example, `{'date': '2021-04-26'}` indicates the connector previously read data up until April 26th and therefore shouldn't re-read anything before April 26th. @@ -226,17 +228,17 @@ We should now have a working implementation of incremental sync! Let's try it out: ```text -python main.py read --config sample_files/config.json --catalog sample_files/configured_catalog.json +python main.py read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` You should see a bunch of `RECORD` messages and `STATE` messages. To verify that incremental sync is working, pass the input state back to the connector and run it again: ```text # Save the latest state to sample_files/state.json -python main.py read --config sample_files/config.json --catalog sample_files/configured_catalog.json | grep STATE | tail -n 1 | jq .state.data > sample_files/state.json +python main.py read --config secrets/config.json --catalog sample_files/configured_catalog.json | grep STATE | tail -n 1 | jq .state.data > sample_files/state.json # Run a read operation with the latest state message -python main.py read --config sample_files/config.json --catalog sample_files/configured_catalog.json --state sample_files/state.json +python main.py read --config secrets/config.json --catalog sample_files/configured_catalog.json --state sample_files/state.json ``` You should see that only the record from the last date is being synced! This is acceptable behavior, since Airbyte requires at-least-once delivery of records, so repeating the last record twice is OK. From 553024780f60bcfcb771968f0b5d8c0bf2221176 Mon Sep 17 00:00:00 2001 From: Alexandre Girard Date: Fri, 29 Apr 2022 15:34:43 -0700 Subject: [PATCH 039/152] Destination Bigquery: Dataset location is a required field (#12477) * Dataset location is required * Update doc * Bump versions * reset * Bump * downgrade * Revert "downgrade" This reverts commit b482c54d6f04f386b5da03edf00df915985f572e. 
* auto-bump connector version Co-authored-by: Octavia Squidington III --- .../seed/destination_definitions.yaml | 4 +- .../resources/seed/destination_specs.yaml | 20 ++-- .../Dockerfile | 2 +- .../destination-bigquery/Dockerfile | 2 +- .../src/main/resources/spec.json | 15 ++- docs/integrations/destinations/bigquery.md | 100 +++++++++--------- 6 files changed, 73 insertions(+), 70 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index e9f195a598225e..c191692f1aeb32 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -24,7 +24,7 @@ - name: BigQuery destinationDefinitionId: 22f6c74f-5699-40ff-833c-4a879ea40133 dockerRepository: airbyte/destination-bigquery - dockerImageTag: 1.1.1 + dockerImageTag: 1.1.2 documentationUrl: https://docs.airbyte.io/integrations/destinations/bigquery icon: bigquery.svg resourceRequirements: @@ -36,7 +36,7 @@ - name: BigQuery (denormalized typed struct) destinationDefinitionId: 079d5540-f236-4294-ba7c-ade8fd918496 dockerRepository: airbyte/destination-bigquery-denormalized - dockerImageTag: 0.3.1 + dockerImageTag: 0.3.2 documentationUrl: https://docs.airbyte.io/integrations/destinations/bigquery icon: bigquery.svg resourceRequirements: diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index dfa8a93846274e..144375d382e74e 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -285,7 +285,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-bigquery:1.1.1" +- dockerImage: "airbyte/destination-bigquery:1.1.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" connectionSpecification: @@ -294,6 +294,7 @@ type: "object" required: - "project_id" + - "dataset_location" - "dataset_id" additionalProperties: true properties: @@ -317,20 +318,13 @@ \ dataset. Read more here." title: "Project ID" - dataset_id: - type: "string" - description: "The default BigQuery Dataset ID that tables are replicated\ - \ to if the source does not specify a namespace. Read more here." - title: "Default Dataset ID" dataset_location: type: "string" description: "The location of the dataset. Warning: Changes made after creation\ \ will not be applied. The default \"US\" value is used if not set explicitly.\ \ Read more here." - title: "Dataset Location (Optional)" - default: "US" + title: "Dataset Location" enum: - "US" - "EU" @@ -363,6 +357,12 @@ - "us-west2" - "us-west3" - "us-west4" + dataset_id: + type: "string" + description: "The default BigQuery Dataset ID that tables are replicated\ + \ to if the source does not specify a namespace. Read more here." + title: "Default Dataset ID" credentials_json: type: "string" description: "The contents of the JSON service account key. 
Check out the\ @@ -495,7 +495,7 @@ - "overwrite" - "append" - "append_dedup" -- dockerImage: "airbyte/destination-bigquery-denormalized:0.3.1" +- dockerImage: "airbyte/destination-bigquery-denormalized:0.3.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" connectionSpecification: diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile index 254ccae1e75b82..c1a1ad24403bda 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile +++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile @@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.3.1 +LABEL io.airbyte.version=0.3.2 LABEL io.airbyte.name=airbyte/destination-bigquery-denormalized diff --git a/airbyte-integrations/connectors/destination-bigquery/Dockerfile b/airbyte-integrations/connectors/destination-bigquery/Dockerfile index ceab2eaaa91069..0804008a69dad3 100644 --- a/airbyte-integrations/connectors/destination-bigquery/Dockerfile +++ b/airbyte-integrations/connectors/destination-bigquery/Dockerfile @@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=1.1.1 +LABEL io.airbyte.version=1.1.2 LABEL io.airbyte.name=airbyte/destination-bigquery diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-bigquery/src/main/resources/spec.json index 3e74bbb81283ab..3195be26c9d5ec 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/resources/spec.json @@ -8,7 +8,7 @@ "$schema": "http://json-schema.org/draft-07/schema#", "title": "BigQuery Destination Spec", "type": "object", - "required": ["project_id", "dataset_id"], + "required": ["project_id", "dataset_location", "dataset_id"], "additionalProperties": true, "properties": { "big_query_client_buffer_size_mb": { @@ -25,16 +25,10 @@ "description": "The GCP project ID for the project containing the target BigQuery dataset. Read more here.", "title": "Project ID" }, - "dataset_id": { - "type": "string", - "description": "The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.", - "title": "Default Dataset ID" - }, "dataset_location": { "type": "string", "description": "The location of the dataset. Warning: Changes made after creation will not be applied. The default \"US\" value is used if not set explicitly. Read more here.", - "title": "Dataset Location (Optional)", - "default": "US", + "title": "Dataset Location", "enum": [ "US", "EU", @@ -69,6 +63,11 @@ "us-west4" ] }, + "dataset_id": { + "type": "string", + "description": "The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.", + "title": "Default Dataset ID" + }, "credentials_json": { "type": "string", "description": "The contents of the JSON service account key. Check out the docs if you need help generating this key. 
Default credentials will be used if this field is left empty.", diff --git a/docs/integrations/destinations/bigquery.md b/docs/integrations/destinations/bigquery.md index 6916d66d93b03e..ad4a7deefcf912 100644 --- a/docs/integrations/destinations/bigquery.md +++ b/docs/integrations/destinations/bigquery.md @@ -177,75 +177,79 @@ There are 2 available options to upload data to BigQuery `Standard` and `GCS Sta ### `GCS Staging` This is the recommended configuration for uploading data to BigQuery. It works by first uploading all the data to a [GCS](https://cloud.google.com/storage) bucket, then ingesting the data to BigQuery. To configure GCS Staging, you'll need the following parameters: + * **GCS Bucket Name** * **GCS Bucket Path** * **Block Size (MB) for GCS multipart upload** * **GCS Bucket Keep files after migration** - * See [this](https://cloud.google.com/storage/docs/creating-buckets) for instructions on how to create a GCS bucket. The bucket cannot have a retention policy. Set Protection Tools to none or Object versioning. + * See [this](https://cloud.google.com/storage/docs/creating-buckets) for instructions on how to create a GCS bucket. The bucket cannot have a retention policy. Set Protection Tools to none or Object versioning. * **HMAC Key Access ID** - * See [this](https://cloud.google.com/storage/docs/authentication/managing-hmackeys) on how to generate an access key. For more information on hmac keys please reference the [GCP docs](https://cloud.google.com/storage/docs/authentication/hmackeys) - * We recommend creating an Airbyte-specific user or service account. This user or account will require the following permissions for the bucket: - ``` - storage.multipartUploads.abort - storage.multipartUploads.create - storage.objects.create - storage.objects.delete - storage.objects.get - storage.objects.list - ``` - You can set those by going to the permissions tab in the GCS bucket and adding the appropriate the email address of the service account or user and adding the aforementioned permissions. + * See [this](https://cloud.google.com/storage/docs/authentication/managing-hmackeys) on how to generate an access key. For more information on hmac keys please reference the [GCP docs](https://cloud.google.com/storage/docs/authentication/hmackeys) + * We recommend creating an Airbyte-specific user or service account. This user or account will require the following permissions for the bucket: + ``` + storage.multipartUploads.abort + storage.multipartUploads.create + storage.objects.create + storage.objects.delete + storage.objects.get + storage.objects.list + ``` + You can set those by going to the permissions tab in the GCS bucket and adding the appropriate the email address of the service account or user and adding the aforementioned permissions. * **Secret Access Key** - * Corresponding key to the above access ID. + * Corresponding key to the above access ID. * Make sure your GCS bucket is accessible from the machine running Airbyte. This depends on your networking setup. The easiest way to verify if Airbyte is able to connect to your GCS bucket is via the check connection tool in the UI. ### `Standard` uploads + This uploads data directly from your source to BigQuery. While this is faster to setup initially, **we strongly recommend that you do not use this option for anything other than a quick demo**. It is more than 10x slower than the GCS uploading option and will fail for many datasets. Please be aware you may see some failures for big datasets and slow sources, e.g. 
if reading from source takes more than 10-12 hours. This is caused by the Google BigQuery SDK client limitations. For more details please check [https://github.com/airbytehq/airbyte/issues/3549](https://github.com/airbytehq/airbyte/issues/3549) ## CHANGELOG ### bigquery -| Version | Date | Pull Request | Subject | -|:--------| :--- | :--- | :--- | -| 1.1.1 | 2022-04-15 | [12068](https://github.com/airbytehq/airbyte/pull/12068) | Fixed bug with GCS bucket conditional binding | -| 1.1.0 | 2022-04-06 | [11776](https://github.com/airbytehq/airbyte/pull/11776) | Use serialized buffering strategy to reduce memory consumption. | -| 1.0.2 | 2022-03-30 | [11620](https://github.com/airbytehq/airbyte/pull/11620) | Updated spec | -| 1.0.1 | 2022-03-24 | [11350](https://github.com/airbytehq/airbyte/pull/11350) | Improve check performance | -| 1.0.0 | 2022-03-18 | [11238](https://github.com/airbytehq/airbyte/pull/11238) | Updated spec and documentation | -| 0.6.12 | 2022-03-18 | [10793](https://github.com/airbytehq/airbyte/pull/10793) | Fix namespace with invalid characters | -| 0.6.11 | 2022-03-03 | [10755](https://github.com/airbytehq/airbyte/pull/10755) | Make sure to kill children threads and stop JVM | -| 0.6.8 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | -| 0.6.6 | 2022-02-01 | [\#9959](https://github.com/airbytehq/airbyte/pull/9959) | Fix null pointer exception from buffered stream consumer. | -| 0.6.6 | 2022-01-29 | [\#9745](https://github.com/airbytehq/airbyte/pull/9745) | Integrate with Sentry. | -| 0.6.5 | 2022-01-18 | [\#9573](https://github.com/airbytehq/airbyte/pull/9573) | BigQuery Destination : update description for some input fields | -| 0.6.4 | 2022-01-17 | [\#8383](https://github.com/airbytehq/airbyte/issues/8383) | Support dataset-id prefixed by project-id | -| 0.6.3 | 2022-01-12 | [\#9415](https://github.com/airbytehq/airbyte/pull/9415) | BigQuery Destination : Fix GCS processing of Facebook data | -| 0.6.2 | 2022-01-10 | [\#9121](https://github.com/airbytehq/airbyte/pull/9121) | Fixed check method for GCS mode to verify if all roles assigned to user | -| 0.6.1 | 2021-12-22 | [\#9039](https://github.com/airbytehq/airbyte/pull/9039) | Added part_size configuration to UI for GCS staging | +| Version | Date | Pull Request | Subject | +|:--------|:-----------| :--- |:--------------------------------------------------------------------------------------------| +| 1.1.2 | 2022-04-29 | [12477](https://github.com/airbytehq/airbyte/pull/12477) | Dataset location is a required field | +| 1.1.1 | 2022-04-15 | [12068](https://github.com/airbytehq/airbyte/pull/12068) | Fixed bug with GCS bucket conditional binding | +| 1.1.0 | 2022-04-06 | [11776](https://github.com/airbytehq/airbyte/pull/11776) | Use serialized buffering strategy to reduce memory consumption. 
| +| 1.0.2 | 2022-03-30 | [11620](https://github.com/airbytehq/airbyte/pull/11620) | Updated spec | +| 1.0.1 | 2022-03-24 | [11350](https://github.com/airbytehq/airbyte/pull/11350) | Improve check performance | +| 1.0.0 | 2022-03-18 | [11238](https://github.com/airbytehq/airbyte/pull/11238) | Updated spec and documentation | +| 0.6.12 | 2022-03-18 | [10793](https://github.com/airbytehq/airbyte/pull/10793) | Fix namespace with invalid characters | +| 0.6.11 | 2022-03-03 | [10755](https://github.com/airbytehq/airbyte/pull/10755) | Make sure to kill children threads and stop JVM | +| 0.6.8 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | +| 0.6.6 | 2022-02-01 | [\#9959](https://github.com/airbytehq/airbyte/pull/9959) | Fix null pointer exception from buffered stream consumer. | +| 0.6.6 | 2022-01-29 | [\#9745](https://github.com/airbytehq/airbyte/pull/9745) | Integrate with Sentry. | +| 0.6.5 | 2022-01-18 | [\#9573](https://github.com/airbytehq/airbyte/pull/9573) | BigQuery Destination : update description for some input fields | +| 0.6.4 | 2022-01-17 | [\#8383](https://github.com/airbytehq/airbyte/issues/8383) | Support dataset-id prefixed by project-id | +| 0.6.3 | 2022-01-12 | [\#9415](https://github.com/airbytehq/airbyte/pull/9415) | BigQuery Destination : Fix GCS processing of Facebook data | +| 0.6.2 | 2022-01-10 | [\#9121](https://github.com/airbytehq/airbyte/pull/9121) | Fixed check method for GCS mode to verify if all roles assigned to user | +| 0.6.1 | 2021-12-22 | [\#9039](https://github.com/airbytehq/airbyte/pull/9039) | Added part_size configuration to UI for GCS staging | | 0.6.0 | 2021-12-17 | [\#8788](https://github.com/airbytehq/airbyte/issues/8788) | BigQuery/BiqQuery denorm Destinations : Add possibility to use different types of GCS files | -| 0.5.1 | 2021-12-16 | [\#8816](https://github.com/airbytehq/airbyte/issues/8816) | Update dataset locations | -| 0.5.0 | 2021-10-26 | [\#7240](https://github.com/airbytehq/airbyte/issues/7240) | Output partitioned/clustered tables | -| 0.4.1 | 2021-10-04 | [\#6733](https://github.com/airbytehq/airbyte/issues/6733) | Support dataset starting with numbers | -| 0.4.0 | 2021-08-26 | [\#5296](https://github.com/airbytehq/airbyte/issues/5296) | Added GCS Staging uploading option | -| 0.3.12 | 2021-08-03 | [\#3549](https://github.com/airbytehq/airbyte/issues/3549) | Add optional arg to make a possibility to change the BigQuery client's chunk\buffer size | -| 0.3.11 | 2021-07-30 | [\#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | -| 0.3.10 | 2021-07-28 | [\#3549](https://github.com/airbytehq/airbyte/issues/3549) | Add extended logs and made JobId filled with region and projectId | -| 0.3.9 | 2021-07-28 | [\#5026](https://github.com/airbytehq/airbyte/pull/5026) | Add sanitized json fields in raw tables to handle quotes in column names | -| 0.3.6 | 2021-06-18 | [\#3947](https://github.com/airbytehq/airbyte/issues/3947) | Service account credentials are now optional. 
| -| 0.3.4 | 2021-06-07 | [\#3277](https://github.com/airbytehq/airbyte/issues/3277) | Add dataset location option | +| 0.5.1 | 2021-12-16 | [\#8816](https://github.com/airbytehq/airbyte/issues/8816) | Update dataset locations | +| 0.5.0 | 2021-10-26 | [\#7240](https://github.com/airbytehq/airbyte/issues/7240) | Output partitioned/clustered tables | +| 0.4.1 | 2021-10-04 | [\#6733](https://github.com/airbytehq/airbyte/issues/6733) | Support dataset starting with numbers | +| 0.4.0 | 2021-08-26 | [\#5296](https://github.com/airbytehq/airbyte/issues/5296) | Added GCS Staging uploading option | +| 0.3.12 | 2021-08-03 | [\#3549](https://github.com/airbytehq/airbyte/issues/3549) | Add optional arg to make a possibility to change the BigQuery client's chunk\buffer size | +| 0.3.11 | 2021-07-30 | [\#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | +| 0.3.10 | 2021-07-28 | [\#3549](https://github.com/airbytehq/airbyte/issues/3549) | Add extended logs and made JobId filled with region and projectId | +| 0.3.9 | 2021-07-28 | [\#5026](https://github.com/airbytehq/airbyte/pull/5026) | Add sanitized json fields in raw tables to handle quotes in column names | +| 0.3.6 | 2021-06-18 | [\#3947](https://github.com/airbytehq/airbyte/issues/3947) | Service account credentials are now optional. | +| 0.3.4 | 2021-06-07 | [\#3277](https://github.com/airbytehq/airbyte/issues/3277) | Add dataset location option | ### bigquery-denormalized | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------| :--- | -| 0.3.1 | 2022-04-15 | [11978](https://github.com/airbytehq/airbyte/pull/11978) | Fixed emittedAt timestamp. | -| 0.3.0 | 2022-04-06 | [11776](https://github.com/airbytehq/airbyte/pull/11776) | Use serialized buffering strategy to reduce memory consumption. | -| 0.2.15 | 2022-04-05 | [11166](https://github.com/airbytehq/airbyte/pull/11166) | Fixed handling of anyOf and allOf fields | -| 0.2.14 | 2022-04-02 | [11620](https://github.com/airbytehq/airbyte/pull/11620) | Updated spec | -| 0.2.13 | 2022-04-01 | [11636](https://github.com/airbytehq/airbyte/pull/11636) | Added new unit tests | -| 0.2.12 | 2022-03-28 | [11454](https://github.com/airbytehq/airbyte/pull/11454) | Integration test enhancement for picking test-data and schemas | -| 0.2.11 | 2022-03-18 | [10793](https://github.com/airbytehq/airbyte/pull/10793) | Fix namespace with invalid characters | -| 0.2.10 | 2022-03-03 | [10755](https://github.com/airbytehq/airbyte/pull/10755) | Make sure to kill children threads and stop JVM | +| 0.3.2 | 2022-04-29 | [12477](https://github.com/airbytehq/airbyte/pull/12477) | Dataset location is a required field | +| 0.3.1 | 2022-04-15 | [11978](https://github.com/airbytehq/airbyte/pull/11978) | Fixed emittedAt timestamp. | +| 0.3.0 | 2022-04-06 | [11776](https://github.com/airbytehq/airbyte/pull/11776) | Use serialized buffering strategy to reduce memory consumption. 
| +| 0.2.15 | 2022-04-05 | [11166](https://github.com/airbytehq/airbyte/pull/11166) | Fixed handling of anyOf and allOf fields | +| 0.2.14 | 2022-04-02 | [11620](https://github.com/airbytehq/airbyte/pull/11620) | Updated spec | +| 0.2.13 | 2022-04-01 | [11636](https://github.com/airbytehq/airbyte/pull/11636) | Added new unit tests | +| 0.2.12 | 2022-03-28 | [11454](https://github.com/airbytehq/airbyte/pull/11454) | Integration test enhancement for picking test-data and schemas | +| 0.2.11 | 2022-03-18 | [10793](https://github.com/airbytehq/airbyte/pull/10793) | Fix namespace with invalid characters | +| 0.2.10 | 2022-03-03 | [10755](https://github.com/airbytehq/airbyte/pull/10755) | Make sure to kill children threads and stop JVM | | 0.2.8 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | | 0.2.7 | 2022-02-01 | [\#9959](https://github.com/airbytehq/airbyte/pull/9959) | Fix null pointer exception from buffered stream consumer. | | 0.2.6 | 2022-01-29 | [\#9745](https://github.com/airbytehq/airbyte/pull/9745) | Integrate with Sentry. | From f6791b854f0a0e69c84c38617200eec808f140ff Mon Sep 17 00:00:00 2001 From: Octavia Squidington III <90398440+octavia-squidington-iii@users.noreply.github.com> Date: Fri, 29 Apr 2022 21:28:39 -0300 Subject: [PATCH 040/152] Bump Airbyte version from 0.36.5-alpha to 0.36.6-alpha (#12485) Co-authored-by: girarda --- .bumpversion.cfg | 2 +- .env | 2 +- airbyte-bootloader/Dockerfile | 2 +- airbyte-container-orchestrator/Dockerfile | 2 +- airbyte-metrics/reporter/Dockerfile | 2 +- airbyte-scheduler/app/Dockerfile | 2 +- airbyte-server/Dockerfile | 2 +- airbyte-webapp/package-lock.json | 4 ++-- airbyte-webapp/package.json | 2 +- airbyte-workers/Dockerfile | 2 +- charts/airbyte/Chart.yaml | 2 +- charts/airbyte/README.md | 10 +++++----- charts/airbyte/values.yaml | 10 +++++----- docs/operator-guides/upgrading-airbyte.md | 2 +- kube/overlays/stable-with-resource-limits/.env | 2 +- .../stable-with-resource-limits/kustomization.yaml | 12 ++++++------ kube/overlays/stable/.env | 2 +- kube/overlays/stable/kustomization.yaml | 12 ++++++------ octavia-cli/Dockerfile | 2 +- octavia-cli/README.md | 2 +- octavia-cli/install.sh | 2 +- octavia-cli/setup.py | 2 +- 22 files changed, 41 insertions(+), 41 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 65df21860e0807..7cbf98e4587d74 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.36.5-alpha +current_version = 0.36.6-alpha commit = False tag = False parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-[a-z]+)?
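For reference, the `parse` pattern in the `.bumpversion.cfg` hunk above is what lets bumpversion split a version string such as `0.36.6-alpha` into numeric components while tolerating a pre-release suffix. A minimal sketch of that behaviour in Python, assuming the capture groups are named `major`, `minor`, and `patch` per bumpversion's conventions (the companion `serialize` setting is not shown in this hunk):

```python
import re

# Parse pattern from the .bumpversion.cfg hunk above. The group names
# (major/minor/patch) are assumed from bumpversion conventions; the
# optional unnamed group absorbs pre-release suffixes such as "-alpha".
PARSE = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-[a-z]+)?")

m = PARSE.fullmatch("0.36.6-alpha")
assert m is not None
print(m.group("major"), m.group("minor"), m.group("patch"))  # -> 0 36 6
```

Because the suffix sits outside the named groups, bumping 0.36.5-alpha to 0.36.6-alpha changes only `patch`, which is why every file touched by this commit moves by a single version digit.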
diff --git a/.env b/.env index 12df5d2e93b97a..8155983831749a 100644 --- a/.env +++ b/.env @@ -10,7 +10,7 @@ ### SHARED ### -VERSION=0.36.5-alpha +VERSION=0.36.6-alpha # When using the airbyte-db via default docker image CONFIG_ROOT=/data diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index ddca518b1f5fa0..74e2c9d0c95932 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim -ARG VERSION=0.36.5-alpha +ARG VERSION=0.36.6-alpha ENV APPLICATION airbyte-bootloader ENV VERSION ${VERSION} diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index 159cf08bae891a..4eb292021a6bdf 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -25,7 +25,7 @@ RUN curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packa RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list RUN apt-get update && apt-get install -y kubectl -ARG VERSION=0.36.5-alpha +ARG VERSION=0.36.6-alpha ENV APPLICATION airbyte-container-orchestrator ENV VERSION=${VERSION} diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index 2282a6923040b2..25890b1690b434 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS metrics-reporter -ARG VERSION=0.36.5-alpha +ARG VERSION=0.36.6-alpha ENV APPLICATION airbyte-metrics-reporter ENV VERSION ${VERSION} diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile index b708b1982cf318..b841e267f1ed10 100644 --- a/airbyte-scheduler/app/Dockerfile +++ b/airbyte-scheduler/app/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS scheduler -ARG VERSION=0.36.5-alpha +ARG VERSION=0.36.6-alpha ENV APPLICATION airbyte-scheduler ENV VERSION ${VERSION} diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index a6c2b94c27ca76..6f514552ec5aa4 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -3,7 +3,7 @@ FROM openjdk:${JDK_VERSION}-slim AS server EXPOSE 8000 -ARG VERSION=0.36.5-alpha +ARG VERSION=0.36.6-alpha ENV APPLICATION airbyte-server ENV VERSION ${VERSION} diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index 0aa1fa28a709bf..3625e37351bde8 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.36.5-alpha", + "version": "0.36.6-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.36.5-alpha", + "version": "0.36.6-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^6.1.1", "@fortawesome/free-brands-svg-icons": "^6.1.1", diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index 065e704118f59d..317d3635b31b4a 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.36.5-alpha", + "version": "0.36.6-alpha", "private": true, "engines": { "node": ">=16.0.0" diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile index 8c2b9da37ecc73..efe47ace83bc6d 100644 --- a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -25,7 
+25,7 @@ RUN curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packa RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list RUN apt-get update && apt-get install -y kubectl -ARG VERSION=0.36.5-alpha +ARG VERSION=0.36.6-alpha ENV APPLICATION airbyte-workers ENV VERSION ${VERSION} diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index ace0f2c1c0d490..fbcf3833152504 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -21,7 +21,7 @@ version: 0.3.1 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "0.36.5-alpha" +appVersion: "0.36.6-alpha" dependencies: - name: common diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index e7bbf27c282391..22c78bede3fa54 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -31,7 +31,7 @@ Helm charts for Airbyte. | `webapp.replicaCount` | Number of webapp replicas | `1` | | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` | | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` | -| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.36.5-alpha` | +| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.36.6-alpha` | | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` | | `webapp.containerSecurityContext` | Security context for the container | `{}` | | `webapp.livenessProbe.enabled` | Enable livenessProbe on the webapp | `true` | @@ -73,7 +73,7 @@ Helm charts for Airbyte. | `scheduler.replicaCount` | Number of scheduler replicas | `1` | | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` | | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | -| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.36.5-alpha` | +| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.36.6-alpha` | | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | | `scheduler.resources.limits` | The resources limits for the scheduler container | `{}` | | `scheduler.resources.requests` | The requested resources for the scheduler container | `{}` | @@ -120,7 +120,7 @@ Helm charts for Airbyte. | `server.replicaCount` | Number of server replicas | `1` | | `server.image.repository` | The repository to use for the airbyte server image. | `airbyte/server` | | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` | -| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.36.5-alpha` | +| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.36.6-alpha` | | `server.podAnnotations` | Add extra annotations to the server pod | `{}` | | `server.containerSecurityContext` | Security context for the container | `{}` | | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` | @@ -158,7 +158,7 @@ Helm charts for Airbyte. 
| `worker.replicaCount` | Number of worker replicas | `1` | | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` | | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` | -| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.36.5-alpha` | +| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.36.6-alpha` | | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` | | `worker.containerSecurityContext` | Security context for the container | `{}` | | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` | @@ -190,7 +190,7 @@ Helm charts for Airbyte. | ----------------------------- | -------------------------------------------------------------------- | -------------------- | | `bootloader.image.repository` | The repository to use for the airbyte bootloader image. | `airbyte/bootloader` | | `bootloader.image.pullPolicy` | the pull policy to use for the airbyte bootloader image | `IfNotPresent` | -| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.36.5-alpha` | +| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.36.6-alpha` | ### Temporal parameters diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index 1e445caba5dfa2..ad5ec07f8fcb67 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -43,7 +43,7 @@ webapp: image: repository: airbyte/webapp pullPolicy: IfNotPresent - tag: 0.36.5-alpha + tag: 0.36.6-alpha ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s) ## @@ -209,7 +209,7 @@ scheduler: image: repository: airbyte/scheduler pullPolicy: IfNotPresent - tag: 0.36.5-alpha + tag: 0.36.6-alpha ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod ## @@ -440,7 +440,7 @@ server: image: repository: airbyte/server pullPolicy: IfNotPresent - tag: 0.36.5-alpha + tag: 0.36.6-alpha ## @param server.podAnnotations [object] Add extra annotations to the server pod ## @@ -581,7 +581,7 @@ worker: image: repository: airbyte/worker pullPolicy: IfNotPresent - tag: 0.36.5-alpha + tag: 0.36.6-alpha ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s) ## @@ -699,7 +699,7 @@ bootloader: image: repository: airbyte/bootloader pullPolicy: IfNotPresent - tag: 0.36.5-alpha + tag: 0.36.6-alpha ## @param bootloader.podAnnotations [object] Add extra annotations to the bootloader pod ## diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md index 10d152049f4af1..acd9582b16dc1b 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/operator-guides/upgrading-airbyte.md @@ -103,7 +103,7 @@ If you are upgrading from (i.e. your current version of Airbyte is) Airbyte vers Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`. 
```bash - docker run --rm -v /tmp:/config airbyte/migration:0.36.5-alpha --\ + docker run --rm -v /tmp:/config airbyte/migration:0.36.6-alpha --\ --input /config/airbyte_archive.tar.gz\ --output /config/airbyte_archive_migrated.tar.gz ``` diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env index f648fced94bb1d..1b6354b471841b 100644 --- a/kube/overlays/stable-with-resource-limits/.env +++ b/kube/overlays/stable-with-resource-limits/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.36.5-alpha +AIRBYTE_VERSION=0.36.6-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml index 7a0998ce279034..7e2fca766fe400 100644 --- a/kube/overlays/stable-with-resource-limits/kustomization.yaml +++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.36.5-alpha + newTag: 0.36.6-alpha - name: airbyte/bootloader - newTag: 0.36.5-alpha + newTag: 0.36.6-alpha - name: airbyte/scheduler - newTag: 0.36.5-alpha + newTag: 0.36.6-alpha - name: airbyte/server - newTag: 0.36.5-alpha + newTag: 0.36.6-alpha - name: airbyte/webapp - newTag: 0.36.5-alpha + newTag: 0.36.6-alpha - name: airbyte/worker - newTag: 0.36.5-alpha + newTag: 0.36.6-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env index 2a231f777ab9b6..a3fe3a2a989146 100644 --- a/kube/overlays/stable/.env +++ b/kube/overlays/stable/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.36.5-alpha +AIRBYTE_VERSION=0.36.6-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml index a59991df33dea2..d68e6d49ba26f4 100644 --- a/kube/overlays/stable/kustomization.yaml +++ b/kube/overlays/stable/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.36.5-alpha + newTag: 0.36.6-alpha - name: airbyte/bootloader - newTag: 0.36.5-alpha + newTag: 0.36.6-alpha - name: airbyte/scheduler - newTag: 0.36.5-alpha + newTag: 0.36.6-alpha - name: airbyte/server - newTag: 0.36.5-alpha + newTag: 0.36.6-alpha - name: airbyte/webapp - newTag: 0.36.5-alpha + newTag: 0.36.6-alpha - name: airbyte/worker - newTag: 0.36.5-alpha + newTag: 0.36.6-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/octavia-cli/Dockerfile b/octavia-cli/Dockerfile index 291e9dbd7da3f1..75899e30eeebb5 100644 --- a/octavia-cli/Dockerfile +++ b/octavia-cli/Dockerfile @@ -14,5 +14,5 @@ USER octavia-cli WORKDIR /home/octavia-project ENTRYPOINT ["octavia"] -LABEL io.airbyte.version=0.36.5-alpha +LABEL io.airbyte.version=0.36.6-alpha LABEL io.airbyte.name=airbyte/octavia-cli diff --git a/octavia-cli/README.md b/octavia-cli/README.md index 2de64af914cc02..491a7025942ac6 100644 --- a/octavia-cli/README.md +++ b/octavia-cli/README.md @@ -105,7 +105,7 @@ This script: ```bash touch ~/.octavia # Create a file to store env variables that will be mapped the octavia-cli container mkdir my_octavia_project_directory # Create your octavia project directory where YAML configurations will be stored. 
-docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.36.5-alpha +docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.36.6-alpha ``` ### Using `docker-compose` diff --git a/octavia-cli/install.sh b/octavia-cli/install.sh index 032a375959f2b4..66da8b26d01e19 100755 --- a/octavia-cli/install.sh +++ b/octavia-cli/install.sh @@ -3,7 +3,7 @@ # This install scripts currently only works for ZSH and Bash profiles. # It creates an octavia alias in your profile bound to a docker run command and your current user. -VERSION=0.36.5-alpha +VERSION=0.36.6-alpha OCTAVIA_ENV_FILE=${HOME}/.octavia detect_profile() { diff --git a/octavia-cli/setup.py b/octavia-cli/setup.py index f50f51bb63ea1f..dcb27719e6b6c3 100644 --- a/octavia-cli/setup.py +++ b/octavia-cli/setup.py @@ -15,7 +15,7 @@ setup( name="octavia-cli", - version="0.36.5", + version="0.36.6", description="A command line interface to manage Airbyte configurations", long_description=README, author="Airbyte", From 35f2aa9aed8b8b5df9a778b86e2a85feb7804e56 Mon Sep 17 00:00:00 2001 From: LiRen Tu Date: Fri, 29 Apr 2022 23:14:58 -0700 Subject: [PATCH 041/152] =?UTF-8?q?=F0=9F=8E=89=20Jdbc=20sources:=20publis?= =?UTF-8?q?h=20new=20version=20with=20adaptive=20fetch=20size=20(#12480)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Default scaffold to use adaptive streaming config * Switch more connectors to use adaptive streaming config * Bump version for cockroach db * Bump version for db2 * Bump mssql version * Bump mysql version * Bump oracle version * Bump postgres version * Bump redshift version * Bump snowflake version * Bump tidb version * auto-bump connector version * Fix db2 findbug issue * auto-bump connector version * auto-bump connector version * auto-bump connector version * auto-bump connector version * Fix more findbug issues * auto-bump connector version * auto-bump connector version * auto-bump connector version * Fix findbug issue for mysql-strict-encrypt * Fix findbugs issue for oracle source * auto-bump connector version * Remove suppress warnings annotation * Fix oracle encrypt tests * Fix oracle encrypt acceptance test Co-authored-by: Octavia Squidington III --- .../resources/seed/source_definitions.yaml | 18 +++++------ .../src/main/resources/seed/source_specs.yaml | 31 +++++++++++++------ .../java/io/airbyte/db/jdbc/JdbcUtils.java | 12 +++++-- .../{{pascalCase name}}Source.java.hbs | 8 ++--- ...trictEncryptDestinationAcceptanceTest.java | 6 ++-- .../Dockerfile | 2 +- .../connectors/source-cockroachdb/Dockerfile | 2 +- .../source/cockroachdb/CockroachDbSource.java | 4 +-- .../source-db2-strict-encrypt/Dockerfile | 2 +- ...ncryptSourceCertificateAcceptanceTest.java | 3 +- .../Db2JdbcSourceAcceptanceTest.java | 3 +- .../connectors/source-db2/Dockerfile | 2 +- .../Db2Source.java | 3 +- .../Db2SourceCertificateAcceptanceTest.java | 3 +- .../source/jdbc/AbstractJdbcSource.java | 6 +++- .../jdbc/test/JdbcSourceAcceptanceTest.java | 11 +++++-- .../source-mssql-strict-encrypt/Dockerfile | 2 +- .../connectors/source-mssql/Dockerfile | 2 +- .../source-mysql-strict-encrypt/Dockerfile | 2 +- ...StrictEncryptJdbcSourceAcceptanceTest.java | 3 +- .../connectors/source-mysql/Dockerfile | 2 +- .../source-oracle-strict-encrypt/Dockerfile | 2 +- 
.../OracleSourceNneAcceptanceTest.java | 18 +++++------ ...acleStrictEncryptSourceAcceptanceTest.java | 4 +-- ...StrictEncryptJdbcSourceAcceptanceTest.java | 31 ++++++++++--------- .../connectors/source-oracle/Dockerfile | 2 +- .../source/oracle/OracleSource.java | 10 ++++-- .../OracleJdbcSourceAcceptanceTest.java | 18 +++++------ .../source/oracle/OracleSourceTest.java | 5 +-- .../source/oracle/OracleStressTest.java | 13 ++++---- .../source-postgres-strict-encrypt/Dockerfile | 2 +- .../connectors/source-postgres/Dockerfile | 2 +- .../connectors/source-redshift/Dockerfile | 2 +- .../ScaffoldJavaJdbcSource.java | 8 ++--- .../connectors/source-snowflake/Dockerfile | 2 +- .../connectors/source-tidb/Dockerfile | 2 +- .../integrations/source/tidb/TiDBSource.java | 4 +-- docs/integrations/sources/cockroachdb.md | 3 +- docs/integrations/sources/db2.md | 2 +- docs/integrations/sources/mssql.md | 2 +- docs/integrations/sources/mysql.md | 1 + docs/integrations/sources/oracle.md | 2 +- docs/integrations/sources/postgres.md | 2 +- docs/integrations/sources/redshift.md | 1 + docs/integrations/sources/snowflake.md | 1 + docs/integrations/sources/tidb.md | 1 + 46 files changed, 155 insertions(+), 112 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index a643e0e09d7978..2da12aedfb702f 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -147,7 +147,7 @@ - name: Cockroachdb sourceDefinitionId: 9fa5862c-da7c-11eb-8d19-0242ac130003 dockerRepository: airbyte/source-cockroachdb - dockerImageTag: 0.1.11 + dockerImageTag: 0.1.12 documentationUrl: https://docs.airbyte.io/integrations/sources/cockroachdb icon: cockroachdb.svg sourceType: database @@ -348,7 +348,7 @@ - name: IBM Db2 sourceDefinitionId: 447e0381-3780-4b46-bb62-00a4e3c8b8e2 dockerRepository: airbyte/source-db2 - dockerImageTag: 0.1.9 + dockerImageTag: 0.1.10 documentationUrl: https://docs.airbyte.io/integrations/sources/db2 icon: db2.svg sourceType: database @@ -459,7 +459,7 @@ - name: Microsoft SQL Server (MSSQL) sourceDefinitionId: b5ea17b1-f170-46dc-bc31-cc744ca984c1 dockerRepository: airbyte/source-mssql - dockerImageTag: 0.3.21 + dockerImageTag: 0.3.22 documentationUrl: https://docs.airbyte.io/integrations/sources/mssql icon: mssql.svg sourceType: database @@ -501,7 +501,7 @@ - name: MySQL sourceDefinitionId: 435bb9a5-7887-4809-aa58-28c27df0d7ad dockerRepository: airbyte/source-mysql - dockerImageTag: 0.5.9 + dockerImageTag: 0.5.10 documentationUrl: https://docs.airbyte.io/integrations/sources/mysql icon: mysql.svg sourceType: database @@ -535,7 +535,7 @@ - name: Oracle DB sourceDefinitionId: b39a7370-74c3-45a6-ac3a-380d48520a83 dockerRepository: airbyte/source-oracle - dockerImageTag: 0.3.14 + dockerImageTag: 0.3.15 documentationUrl: https://docs.airbyte.io/integrations/sources/oracle icon: oracle.svg sourceType: database @@ -625,7 +625,7 @@ - name: Postgres sourceDefinitionId: decd338e-5647-4c0b-adf4-da0e75f5a750 dockerRepository: airbyte/source-postgres - dockerImageTag: 0.4.11 + dockerImageTag: 0.4.12 documentationUrl: https://docs.airbyte.io/integrations/sources/postgres icon: postgresql.svg sourceType: database @@ -667,7 +667,7 @@ - name: Redshift sourceDefinitionId: e87ffa8e-a3b5-f69c-9076-6011339de1f6 dockerRepository: airbyte/source-redshift - dockerImageTag: 0.3.9 + dockerImageTag: 0.3.10 documentationUrl: 
https://docs.airbyte.io/integrations/sources/redshift icon: redshift.svg sourceType: database @@ -751,7 +751,7 @@ - name: Snowflake sourceDefinitionId: e2d65910-8c8b-40a1-ae7d-ee2416b2bfa2 dockerRepository: airbyte/source-snowflake - dockerImageTag: 0.1.11 + dockerImageTag: 0.1.12 documentationUrl: https://docs.airbyte.io/integrations/sources/snowflake icon: snowflake.svg sourceType: database @@ -792,7 +792,7 @@ - name: TiDB sourceDefinitionId: 0dad1a35-ccf8-4d03-b73e-6788c00b13ae dockerRepository: airbyte/source-tidb - dockerImageTag: 0.1.0 + dockerImageTag: 0.1.1 documentationUrl: https://docs.airbyte.io/integrations/sources/tidb icon: tidb.svg sourceType: database diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 76795b74b9a1e0..95d0bd48606802 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -1305,7 +1305,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-cockroachdb:0.1.11" +- dockerImage: "airbyte/source-cockroachdb:0.1.12" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/cockroachdb" connectionSpecification: @@ -3578,7 +3578,7 @@ - - "client_secret" oauthFlowOutputParameters: - - "refresh_token" -- dockerImage: "airbyte/source-db2:0.1.9" +- dockerImage: "airbyte/source-db2:0.1.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/db2" connectionSpecification: @@ -4669,7 +4669,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-mssql:0.3.21" +- dockerImage: "airbyte/source-mssql:0.3.22" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mssql" connectionSpecification: @@ -4687,6 +4687,7 @@ description: "The hostname of the database." title: "Host" type: "string" + order: 0 port: description: "The port of the database." title: "Port" @@ -4695,21 +4696,32 @@ maximum: 65536 examples: - "1433" + order: 1 database: description: "The name of the database." title: "Database" type: "string" examples: - "master" + order: 2 username: description: "The username which is used to access the database." title: "Username" type: "string" + order: 3 password: description: "The password associated with the username." title: "Password" type: "string" airbyte_secret: true + order: 4 + jdbc_url_params: + title: "JDBC URL Params" + description: "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ type: "string" + order: 5 ssl_method: title: "SSL Method" type: "object" @@ -4774,6 +4786,7 @@ enum: - "STANDARD" - "CDC" + order: 8 tunnel_method: type: "object" title: "SSH Tunnel Method" @@ -5368,7 +5381,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-mysql:0.5.9" +- dockerImage: "airbyte/source-mysql:0.5.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/mysql" connectionSpecification: @@ -5757,7 +5770,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-oracle:0.3.14" +- dockerImage: "airbyte/source-oracle:0.3.15" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/oracle" connectionSpecification: @@ -6478,7 +6491,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-postgres:0.4.11" +- dockerImage: "airbyte/source-postgres:0.4.12" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/postgres" connectionSpecification: @@ -6914,7 +6927,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-redshift:0.3.9" +- dockerImage: "airbyte/source-redshift:0.3.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/redshift" connectionSpecification: @@ -7972,7 +7985,7 @@ - - "client_secret" oauthFlowOutputParameters: - - "refresh_token" -- dockerImage: "airbyte/source-snowflake:0.1.11" +- dockerImage: "airbyte/source-snowflake:0.1.12" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/snowflake" connectionSpecification: @@ -8493,7 +8506,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-tidb:0.1.0" +- dockerImage: "airbyte/source-tidb:0.1.1" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/tidb" connectionSpecification: diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/jdbc/JdbcUtils.java b/airbyte-db/lib/src/main/java/io/airbyte/db/jdbc/JdbcUtils.java index 05caf59336b02d..50471c0da2034e 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/jdbc/JdbcUtils.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/jdbc/JdbcUtils.java @@ -29,17 +29,25 @@ public static String getFullyQualifiedTableName(final String schemaName, final S } public static Map<String, String> parseJdbcParameters(final JsonNode config, final String jdbcUrlParamsKey) { + return parseJdbcParameters(config, jdbcUrlParamsKey, "&"); + } + + public static Map<String, String> parseJdbcParameters(final JsonNode config, final String jdbcUrlParamsKey, final String delimiter) { if (config.has(jdbcUrlParamsKey)) { - return parseJdbcParameters(config.get(jdbcUrlParamsKey).asText()); + return parseJdbcParameters(config.get(jdbcUrlParamsKey).asText(), delimiter); } else { return Maps.newHashMap(); } } public static Map<String, String> parseJdbcParameters(final String jdbcPropertiesString) { + return parseJdbcParameters(jdbcPropertiesString, "&"); + } + + public static Map<String, String> parseJdbcParameters(final String jdbcPropertiesString, final String delimiter) { final Map<String, String> parameters = new HashMap<>(); if (!jdbcPropertiesString.isBlank()) { - final String[] keyValuePairs = jdbcPropertiesString.split("&"); + final String[] keyValuePairs = jdbcPropertiesString.split(delimiter); for (final String kv : keyValuePairs) { final String[] split = kv.split("="); if (split.length == 2) { diff --git
a/airbyte-integrations/connector-templates/source-java-jdbc/src/main/java/io/airbyte/integrations/source/{{snakeCase name}}/{{pascalCase name}}Source.java.hbs b/airbyte-integrations/connector-templates/source-java-jdbc/src/main/java/io/airbyte/integrations/source/{{snakeCase name}}/{{pascalCase name}}Source.java.hbs index d7babcc57203b0..3d3350f9a0106c 100644 --- a/airbyte-integrations/connector-templates/source-java-jdbc/src/main/java/io/airbyte/integrations/source/{{snakeCase name}}/{{pascalCase name}}Source.java.hbs +++ b/airbyte-integrations/connector-templates/source-java-jdbc/src/main/java/io/airbyte/integrations/source/{{snakeCase name}}/{{pascalCase name}}Source.java.hbs @@ -6,7 +6,7 @@ package io.airbyte.integrations.source.{{snakeCase name}}; import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.db.jdbc.JdbcUtils; -import io.airbyte.db.jdbc.streaming.NoOpStreamingQueryConfig; +import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; @@ -23,9 +23,9 @@ public class {{pascalCase name}}Source extends AbstractJdbcSource impl static final String DRIVER_CLASS = "driver_name_here"; public {{pascalCase name}}Source() { - // By default, NoOpStreamingQueryConfig class is used. If the JDBC supports custom - // fetch size, change it to AdaptiveStreamingQueryConfig for better performance. - super(DRIVER_CLASS, NoOpStreamingQueryConfig::new, JdbcUtils.getDefaultSourceOperations()); + // TODO: if the JDBC driver does not support custom fetch size, use NoOpStreamingQueryConfig + // instead of AdaptiveStreamingQueryConfig. + super(DRIVER_CLASS, AdaptiveStreamingQueryConfig::new, JdbcUtils.getDefaultSourceOperations()); } // TODO The config is based on spec.json, update according to your DB diff --git a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java index b360964abcc4fb..6cfde5013241d5 100644 --- a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java @@ -178,8 +178,7 @@ public void testEncryption() throws SQLException { config.get("sid").asText()), "oracle.jdbc.driver.OracleDriver", JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" + - "oracle.net.encryption_types_client=( " - + algorithm + " )")); + "oracle.net.encryption_types_client=( " + algorithm + " )", ";")); final String network_service_banner = "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)"; @@ -204,8 +203,7 @@ public void testCheckProtocol() throws SQLException { clone.get("sid").asText()), "oracle.jdbc.driver.OracleDriver", JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" + - "oracle.net.encryption_types_client=( " - + algorithm + " )")); + 
"oracle.net.encryption_types_client=( " + algorithm + " )", ";")); final String network_service_banner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual"; final List collect = database.unsafeQuery(network_service_banner).collect(Collectors.toList()); diff --git a/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/Dockerfile index 5e07282f511249..f68558cfd60772 100644 --- a/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-cockroachdb-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.8 +LABEL io.airbyte.version=0.1.12 LABEL io.airbyte.name=airbyte/source-cockroachdb-strict-encrypt diff --git a/airbyte-integrations/connectors/source-cockroachdb/Dockerfile b/airbyte-integrations/connectors/source-cockroachdb/Dockerfile index 8a8456ed239de8..8ae1e6922049af 100644 --- a/airbyte-integrations/connectors/source-cockroachdb/Dockerfile +++ b/airbyte-integrations/connectors/source-cockroachdb/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-cockroachdb COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.11 +LABEL io.airbyte.version=0.1.12 LABEL io.airbyte.name=airbyte/source-cockroachdb diff --git a/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSource.java b/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSource.java index bd2ebe01a3b363..dfe64063cf5923 100644 --- a/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSource.java +++ b/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSource.java @@ -12,7 +12,7 @@ import io.airbyte.db.Databases; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; -import io.airbyte.db.jdbc.streaming.NoOpStreamingQueryConfig; +import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.base.ssh.SshWrappedSource; @@ -41,7 +41,7 @@ public class CockroachDbSource extends AbstractJdbcSource { public static final List PORT_KEY = List.of("port"); public CockroachDbSource() { - super(DRIVER_CLASS, NoOpStreamingQueryConfig::new, new CockroachJdbcSourceOperations()); + super(DRIVER_CLASS, AdaptiveStreamingQueryConfig::new, new CockroachJdbcSourceOperations()); } public static Source sshWrappedSource() { diff --git a/airbyte-integrations/connectors/source-db2-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-db2-strict-encrypt/Dockerfile index f992a81d9cc05d..dfa81a644c1e6e 100644 --- a/airbyte-integrations/connectors/source-db2-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-db2-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-db2-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.6 +LABEL io.airbyte.version=0.1.10 LABEL io.airbyte.name=airbyte/source-db2-strict-encrypt diff --git 
a/airbyte-integrations/connectors/source-db2-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2StrictEncryptSourceCertificateAcceptanceTest.java b/airbyte-integrations/connectors/source-db2-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2StrictEncryptSourceCertificateAcceptanceTest.java index 63c23e74177a43..a21c00c2f0100e 100644 --- a/airbyte-integrations/connectors/source-db2-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2StrictEncryptSourceCertificateAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-db2-strict-encrypt/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2StrictEncryptSourceCertificateAcceptanceTest.java @@ -25,6 +25,7 @@ import java.io.File; import java.io.IOException; import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.concurrent.TimeUnit; import org.testcontainers.containers.Db2Container; @@ -181,7 +182,7 @@ private String getCertificate() throws IOException, InterruptedException { private static void convertAndImportCertificate(final String certificate) throws IOException, InterruptedException { final Runtime run = Runtime.getRuntime(); - try (final PrintWriter out = new PrintWriter("certificate.pem")) { + try (final PrintWriter out = new PrintWriter("certificate.pem", StandardCharsets.UTF_8)) { out.print(certificate); } runProcess("openssl x509 -outform der -in certificate.pem -out certificate.der", run); diff --git a/airbyte-integrations/connectors/source-db2-strict-encrypt/src/test/java/io/airbyte/integrations/source/db2_strict_encrypt/Db2JdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-db2-strict-encrypt/src/test/java/io/airbyte/integrations/source/db2_strict_encrypt/Db2JdbcSourceAcceptanceTest.java index 0627551bfeb2a1..61f79b0655d996 100644 --- a/airbyte-integrations/connectors/source-db2-strict-encrypt/src/test/java/io/airbyte/integrations/source/db2_strict_encrypt/Db2JdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-db2-strict-encrypt/src/test/java/io/airbyte/integrations/source/db2_strict_encrypt/Db2JdbcSourceAcceptanceTest.java @@ -19,6 +19,7 @@ import java.io.File; import java.io.IOException; import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; import java.sql.JDBCType; import java.util.Collections; import java.util.Set; @@ -186,7 +187,7 @@ private static String getCertificate() throws IOException, InterruptedException private static void convertAndImportCertificate(final String certificate) throws IOException, InterruptedException { final Runtime run = Runtime.getRuntime(); - try (final PrintWriter out = new PrintWriter("certificate.pem")) { + try (final PrintWriter out = new PrintWriter("certificate.pem", StandardCharsets.UTF_8)) { out.print(certificate); } runProcess("openssl x509 -outform der -in certificate.pem -out certificate.der", run); diff --git a/airbyte-integrations/connectors/source-db2/Dockerfile b/airbyte-integrations/connectors/source-db2/Dockerfile index f658419be0068c..d2e3e152e7cc96 100644 --- a/airbyte-integrations/connectors/source-db2/Dockerfile +++ b/airbyte-integrations/connectors/source-db2/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-db2 COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.9 +LABEL io.airbyte.version=0.1.10 LABEL io.airbyte.name=airbyte/source-db2 diff 
--git a/airbyte-integrations/connectors/source-db2/src/main/java/io.airbyte.integrations.source.db2/Db2Source.java b/airbyte-integrations/connectors/source-db2/src/main/java/io.airbyte.integrations.source.db2/Db2Source.java index 74b897c7d93b21..f48de8c9e2dc1a 100644 --- a/airbyte-integrations/connectors/source-db2/src/main/java/io.airbyte.integrations.source.db2/Db2Source.java +++ b/airbyte-integrations/connectors/source-db2/src/main/java/io.airbyte.integrations.source.db2/Db2Source.java @@ -16,6 +16,7 @@ import io.airbyte.integrations.source.jdbc.dto.JdbcPrivilegeDto; import java.io.IOException; import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.JDBCType; import java.sql.PreparedStatement; @@ -143,7 +144,7 @@ private static String getKeyStorePassword(final JsonNode encryptionKeyStorePassw private static void convertAndImportCertificate(final String certificate, final String keyStorePassword) throws IOException, InterruptedException { final Runtime run = Runtime.getRuntime(); - try (final PrintWriter out = new PrintWriter("certificate.pem")) { + try (final PrintWriter out = new PrintWriter("certificate.pem", StandardCharsets.UTF_8)) { out.print(certificate); } runProcess("openssl x509 -outform der -in certificate.pem -out certificate.der", run); diff --git a/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceCertificateAcceptanceTest.java b/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceCertificateAcceptanceTest.java index 3aa28e9dcaf213..f35bbb52e2095c 100644 --- a/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceCertificateAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-db2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/Db2SourceCertificateAcceptanceTest.java @@ -25,6 +25,7 @@ import java.io.File; import java.io.IOException; import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.concurrent.TimeUnit; import org.testcontainers.containers.Db2Container; @@ -175,7 +176,7 @@ private String getCertificate() throws IOException, InterruptedException { private static void convertAndImportCertificate(final String certificate) throws IOException, InterruptedException { final Runtime run = Runtime.getRuntime(); - try (final PrintWriter out = new PrintWriter("certificate.pem")) { + try (final PrintWriter out = new PrintWriter("certificate.pem", StandardCharsets.UTF_8)) { out.print(certificate); } runProcess("openssl x509 -outform der -in certificate.pem -out certificate.der", run); diff --git a/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java b/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java index 3ca4791c2981cc..d7ee2d9380ee3a 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java +++ b/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java @@ -295,7 +295,7 @@ public JdbcDatabase createDatabase(final JsonNode config) throws SQLException { jdbcConfig.get("jdbc_url").asText(), 
driverClass, streamingQueryConfigProvider, - JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties"), + JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties", getJdbcParameterDelimiter()), sourceOperations); quoteString = (quoteString == null ? database.getMetaData().getIdentifierQuoteString() : quoteString); @@ -303,4 +303,8 @@ public JdbcDatabase createDatabase(final JsonNode config) throws SQLException { return database; } + protected String getJdbcParameterDelimiter() { + return "&"; + } + } diff --git a/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java index 8511ad9d052060..283de93b8d2847 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java @@ -25,6 +25,7 @@ import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcSourceOperations; import io.airbyte.db.jdbc.JdbcUtils; +import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; import io.airbyte.integrations.source.relationaldb.models.DbState; @@ -185,6 +186,10 @@ protected String primaryKeyClause(final List columns) { return clause.toString(); } + protected String getJdbcParameterDelimiter() { + return "&"; + } + public void setup() throws Exception { source = getSource(); config = getConfig(); @@ -192,12 +197,14 @@ public void setup() throws Exception { streamName = TABLE_NAME; - database = Databases.createJdbcDatabase( + database = Databases.createStreamingJdbcDatabase( jdbcConfig.get("username").asText(), jdbcConfig.has("password") ? 
jdbcConfig.get("password").asText() : null, jdbcConfig.get("jdbc_url").asText(), getDriverClass(), - JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties")); + AdaptiveStreamingQueryConfig::new, + JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties", getJdbcParameterDelimiter()), + JdbcUtils.getDefaultSourceOperations()); if (supportsSchemas()) { createSchemas(); diff --git a/airbyte-integrations/connectors/source-mssql-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-mssql-strict-encrypt/Dockerfile index 015453ecde72ad..ab3e22af5dcbce 100644 --- a/airbyte-integrations/connectors/source-mssql-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-mssql-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-mssql-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.11 +LABEL io.airbyte.version=0.3.22 LABEL io.airbyte.name=airbyte/source-mssql-strict-encrypt diff --git a/airbyte-integrations/connectors/source-mssql/Dockerfile b/airbyte-integrations/connectors/source-mssql/Dockerfile index f826af5a2e5287..8091bbc9be8f09 100644 --- a/airbyte-integrations/connectors/source-mssql/Dockerfile +++ b/airbyte-integrations/connectors/source-mssql/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-mssql COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.3.21 +LABEL io.airbyte.version=0.3.22 LABEL io.airbyte.name=airbyte/source-mssql diff --git a/airbyte-integrations/connectors/source-mysql-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-mysql-strict-encrypt/Dockerfile index a7759c8c9a5d88..4a95c3c9cd047e 100644 --- a/airbyte-integrations/connectors/source-mysql-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-mysql-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-mysql-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.12 +LABEL io.airbyte.version=0.5.10 LABEL io.airbyte.name=airbyte/source-mysql-strict-encrypt diff --git a/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptJdbcSourceAcceptanceTest.java index 6484aed0460a17..d6d38b2419c83c 100644 --- a/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptJdbcSourceAcceptanceTest.java @@ -36,7 +36,6 @@ class MySqlStrictEncryptJdbcSourceAcceptanceTest extends JdbcSourceAcceptanceTes protected static final String TEST_PASSWORD = "test"; protected static MySQLContainer container; - protected JsonNode config; protected Database database; @BeforeAll @@ -47,7 +46,7 @@ static void init() throws SQLException { .withEnv("MYSQL_ROOT_HOST", "%") .withEnv("MYSQL_ROOT_PASSWORD", TEST_PASSWORD); container.start(); - final Connection connection = DriverManager.getConnection(container.getJdbcUrl(), "root", TEST_PASSWORD); + final Connection connection = DriverManager.getConnection(container.getJdbcUrl(), "root", container.getPassword()); connection.createStatement().execute("GRANT ALL PRIVILEGES ON *.* TO '" + TEST_USER + "'@'%';\n"); } diff 
--git a/airbyte-integrations/connectors/source-mysql/Dockerfile b/airbyte-integrations/connectors/source-mysql/Dockerfile index 952f96f2c0d7c6..1072e6d436724d 100644 --- a/airbyte-integrations/connectors/source-mysql/Dockerfile +++ b/airbyte-integrations/connectors/source-mysql/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-mysql COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.5.9 +LABEL io.airbyte.version=0.5.10 LABEL io.airbyte.name=airbyte/source-mysql diff --git a/airbyte-integrations/connectors/source-oracle-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-oracle-strict-encrypt/Dockerfile index f64d7feb84a271..96726701554ad1 100644 --- a/airbyte-integrations/connectors/source-oracle-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-oracle-strict-encrypt/Dockerfile @@ -17,5 +17,5 @@ ENV TZ UTC COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.6 +LABEL io.airbyte.version=0.3.15 LABEL io.airbyte.name=airbyte/source-oracle-strict-encrypt diff --git a/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleSourceNneAcceptanceTest.java b/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleSourceNneAcceptanceTest.java index 0705682671001f..f012936d4649e9 100644 --- a/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleSourceNneAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleSourceNneAcceptanceTest.java @@ -16,15 +16,14 @@ import io.airbyte.db.jdbc.JdbcUtils; import java.sql.SQLException; import java.util.List; -import java.util.stream.Collectors; import org.junit.jupiter.api.Test; public class OracleSourceNneAcceptanceTest extends OracleStrictEncryptSourceAcceptanceTest { @Test - public void testEncrytion() throws SQLException { - final JsonNode clone = Jsons.clone(getConfig()); - ((ObjectNode) clone).put("encryption", Jsons.jsonNode(ImmutableMap.builder() + public void testEncryption() throws SQLException { + final ObjectNode clone = (ObjectNode) Jsons.clone(getConfig()); + clone.set("encryption", Jsons.jsonNode(ImmutableMap.builder() .put("encryption_method", "client_nne") .put("encryption_algorithm", "3DES168") .build())); @@ -45,7 +44,7 @@ public void testEncrytion() throws SQLException { final String network_service_banner = "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)"; - final List collect = database.unsafeQuery(network_service_banner).collect(Collectors.toList()); + final List collect = database.unsafeQuery(network_service_banner).toList(); assertTrue(collect.get(2).get("NETWORK_SERVICE_BANNER").asText() .contains(algorithm + " Encryption")); @@ -53,8 +52,8 @@ public void testEncrytion() throws SQLException { @Test public void testCheckProtocol() throws SQLException { - final JsonNode clone = Jsons.clone(getConfig()); - ((ObjectNode) clone).put("encryption", Jsons.jsonNode(ImmutableMap.builder() + final ObjectNode clone = (ObjectNode) Jsons.clone(getConfig()); + clone.set("encryption", Jsons.jsonNode(ImmutableMap.builder() .put("encryption_method", "client_nne") .put("encryption_algorithm", "AES256") .build())); @@ -70,11 +69,10 @@ public void 
testCheckProtocol() throws SQLException { clone.get("sid").asText()), "oracle.jdbc.driver.OracleDriver", JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" + - "oracle.net.encryption_types_client=( " - + algorithm + " )")); + "oracle.net.encryption_types_client=( " + algorithm + " )", ";")); final String network_service_banner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual"; - final List collect = database.unsafeQuery(network_service_banner).collect(Collectors.toList()); + final List collect = database.unsafeQuery(network_service_banner).toList(); assertEquals("tcp", collect.get(0).get("NETWORK_PROTOCOL").asText()); } diff --git a/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleStrictEncryptSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleStrictEncryptSourceAcceptanceTest.java index 150c47cc904589..0106bd75a51292 100644 --- a/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleStrictEncryptSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleStrictEncryptSourceAcceptanceTest.java @@ -61,8 +61,8 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc config.get("port").asText(), config.get("sid").asText()), "oracle.jdbc.driver.OracleDriver", - JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED&" + - "oracle.net.encryption_types_client=( 3DES168 )")); + JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" + + "oracle.net.encryption_types_client=( 3DES168 )", ";")); database.execute(connection -> { connection.createStatement().execute("CREATE USER JDBC_SPACE IDENTIFIED BY JDBC_SPACE DEFAULT TABLESPACE USERS QUOTA UNLIMITED ON USERS"); diff --git a/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleStrictEncryptJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleStrictEncryptJdbcSourceAcceptanceTest.java index 437869f2de4639..6311c64b820a04 100644 --- a/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleStrictEncryptJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleStrictEncryptJdbcSourceAcceptanceTest.java @@ -56,14 +56,6 @@ class OracleStrictEncryptJdbcSourceAcceptanceTest extends JdbcSourceAcceptanceTe @BeforeAll static void init() { - ORACLE_DB = new OracleContainer("epiclabs/docker-oracle-xe-11g") - .withEnv("NLS_DATE_FORMAT", "YYYY-MM-DD"); - ORACLE_DB.start(); - } - - @BeforeEach - public void setup() throws Exception { - SCHEMA_NAME = "JDBC_INTEGRATION_TEST1"; SCHEMA_NAME2 = "JDBC_INTEGRATION_TEST2"; TEST_SCHEMAS = ImmutableSet.of(SCHEMA_NAME, SCHEMA_NAME2); @@ -84,6 +76,13 @@ public void setup() throws Exception { ID_VALUE_4 = new BigDecimal(4); ID_VALUE_5 = new BigDecimal(5); + ORACLE_DB = new 
OracleContainer("epiclabs/docker-oracle-xe-11g") + .withEnv("NLS_DATE_FORMAT", "YYYY-MM-DD"); + ORACLE_DB.start(); + } + + @BeforeEach + public void setup() throws Exception { config = Jsons.jsonNode(ImmutableMap.builder() .put("host", ORACLE_DB.getHost()) .put("port", ORACLE_DB.getFirstMappedPort()) @@ -126,9 +125,8 @@ void cleanUpTables() throws SQLException { conn.createStatement().executeQuery(String.format("SELECT TABLE_NAME FROM ALL_TABLES WHERE OWNER = '%s'", schemaName)); while (resultSet.next()) { final String tableName = resultSet.getString("TABLE_NAME"); - final String tableNameProcessed = tableName.contains(" ") ? sourceOperations - .enquoteIdentifier(conn, tableName) : tableName; - conn.createStatement().executeQuery(String.format("DROP TABLE %s.%s", schemaName, tableNameProcessed)); + final String tableNameProcessed = tableName.contains(" ") ? sourceOperations.enquoteIdentifier(conn, tableName) : tableName; + conn.createStatement().executeQuery("DROP TABLE " + schemaName + "." + tableNameProcessed); } } if (!conn.isClosed()) @@ -180,7 +178,12 @@ public void createSchemas() throws SQLException { } } - public void executeOracleStatement(final String query) throws SQLException { + @Override + protected String getJdbcParameterDelimiter() { + return ";"; + } + + public void executeOracleStatement(final String query) { try (final Connection conn = DriverManager.getConnection( ORACLE_DB.getJdbcUrl(), ORACLE_DB.getUsername(), @@ -194,8 +197,8 @@ public void executeOracleStatement(final String query) throws SQLException { public static void logSQLException(final SQLException ex) { for (final Throwable e : ex) { - if (e instanceof SQLException) { - if (ignoreSQLException(((SQLException) e).getSQLState()) == false) { + if (e instanceof final SQLException sqlException) { + if (!ignoreSQLException(sqlException.getSQLState())) { LOGGER.info("SQLState: " + ((SQLException) e).getSQLState()); LOGGER.info("Error Code: " + ((SQLException) e).getErrorCode()); LOGGER.info("Message: " + e.getMessage()); diff --git a/airbyte-integrations/connectors/source-oracle/Dockerfile b/airbyte-integrations/connectors/source-oracle/Dockerfile index dc2ba25e64557e..cfc0c7aeaa3982 100644 --- a/airbyte-integrations/connectors/source-oracle/Dockerfile +++ b/airbyte-integrations/connectors/source-oracle/Dockerfile @@ -8,5 +8,5 @@ ENV TZ UTC COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar RUN tar xf ${APPLICATION}.tar --strip-components=1 -LABEL io.airbyte.version=0.3.14 +LABEL io.airbyte.version=0.3.15 LABEL io.airbyte.name=airbyte/source-oracle diff --git a/airbyte-integrations/connectors/source-oracle/src/main/java/io/airbyte/integrations/source/oracle/OracleSource.java b/airbyte-integrations/connectors/source-oracle/src/main/java/io/airbyte/integrations/source/oracle/OracleSource.java index 94813f54fff3e8..acd8e04c6a58ba 100644 --- a/airbyte-integrations/connectors/source-oracle/src/main/java/io/airbyte/integrations/source/oracle/OracleSource.java +++ b/airbyte-integrations/connectors/source-oracle/src/main/java/io/airbyte/integrations/source/oracle/OracleSource.java @@ -18,6 +18,7 @@ import io.airbyte.protocol.models.CommonField; import java.io.IOException; import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; import java.sql.JDBCType; import java.util.ArrayList; import java.util.List; @@ -92,7 +93,7 @@ public JsonNode toDatabaseConfig(final JsonNode config) { } } if (!additionalParameters.isEmpty()) { - final String connectionParams = String.join(";", additionalParameters); + 
final String connectionParams = String.join(getJdbcParameterDelimiter(), additionalParameters); configBuilder.put("connection_properties", connectionParams); } @@ -129,7 +130,7 @@ private Protocol obtainConnectionProtocol(final JsonNode encryption, final List< private static void convertAndImportCertificate(final String certificate) throws IOException, InterruptedException { final Runtime run = Runtime.getRuntime(); - try (final PrintWriter out = new PrintWriter("certificate.pem")) { + try (final PrintWriter out = new PrintWriter("certificate.pem", StandardCharsets.UTF_8)) { out.print(certificate); } runProcess("openssl x509 -outform der -in certificate.pem -out certificate.der", run); @@ -170,6 +171,11 @@ public Set getExcludedInternalNameSpaces() { return Set.of(); } + @Override + protected String getJdbcParameterDelimiter() { + return ";"; + } + public static void main(final String[] args) throws Exception { final Source source = OracleSource.sshWrappedSource(); LOGGER.info("starting source: {}", OracleSource.class); diff --git a/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleJdbcSourceAcceptanceTest.java index 0d5c7a604403f0..a608165340c964 100644 --- a/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleJdbcSourceAcceptanceTest.java @@ -55,15 +55,6 @@ class OracleJdbcSourceAcceptanceTest extends JdbcSourceAcceptanceTest { @BeforeAll static void init() { // Oracle returns uppercase values - - ORACLE_DB = new OracleContainer("epiclabs/docker-oracle-xe-11g") - .withEnv("NLS_DATE_FORMAT", "YYYY-MM-DD"); - ORACLE_DB.start(); - } - - @BeforeEach - public void setup() throws Exception { - SCHEMA_NAME = "JDBC_INTEGRATION_TEST1"; SCHEMA_NAME2 = "JDBC_INTEGRATION_TEST2"; TEST_SCHEMAS = ImmutableSet.of(SCHEMA_NAME, SCHEMA_NAME2); @@ -84,6 +75,13 @@ public void setup() throws Exception { ID_VALUE_4 = new BigDecimal(4); ID_VALUE_5 = new BigDecimal(5); + ORACLE_DB = new OracleContainer("epiclabs/docker-oracle-xe-11g") + .withEnv("NLS_DATE_FORMAT", "YYYY-MM-DD"); + ORACLE_DB.start(); + } + + @BeforeEach + public void setup() throws Exception { config = Jsons.jsonNode(ImmutableMap.builder() .put("host", ORACLE_DB.getHost()) .put("port", ORACLE_DB.getFirstMappedPort()) @@ -124,7 +122,7 @@ void cleanUpTables() throws SQLException { final String tableName = resultSet.getString("TABLE_NAME"); final String tableNameProcessed = tableName.contains(" ") ? sourceOperations .enquoteIdentifier(conn, tableName) : tableName; - conn.createStatement().executeQuery(String.format("DROP TABLE %s.%s", schemaName, tableNameProcessed)); + conn.createStatement().executeQuery("DROP TABLE " + schemaName + "." 
+ tableNameProcessed); } } if (!conn.isClosed()) diff --git a/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleSourceTest.java b/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleSourceTest.java index 529cbaeff4c924..ff86b31c011fdc 100644 --- a/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleSourceTest.java +++ b/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleSourceTest.java @@ -24,6 +24,7 @@ import io.airbyte.protocol.models.JsonSchemaType; import io.airbyte.protocol.models.SyncMode; import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -45,8 +46,8 @@ class OracleSourceTest { Field.of("IMAGE", JsonSchemaType.STRING)) .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)))); private static final ConfiguredAirbyteCatalog CONFIGURED_CATALOG = CatalogHelpers.toDefaultConfiguredCatalog(CATALOG); - private static final Set ASCII_MESSAGES = Sets.newHashSet( - createRecord(STREAM_NAME, map("ID", new BigDecimal("1.0"), "NAME", "user", "IMAGE", "last_summer.png".getBytes()))); + private static final Set ASCII_MESSAGES = Sets.newHashSet(createRecord(STREAM_NAME, + map("ID", new BigDecimal("1.0"), "NAME", "user", "IMAGE", "last_summer.png".getBytes(StandardCharsets.UTF_8)))); private static OracleContainer ORACLE_DB; diff --git a/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleStressTest.java b/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleStressTest.java index cdca797efd006d..cbc25fd3aa208e 100644 --- a/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleStressTest.java +++ b/airbyte-integrations/connectors/source-oracle/src/test/java/io/airbyte/integrations/source/oracle/OracleStressTest.java @@ -38,18 +38,18 @@ class OracleStressTest extends JdbcStressTest { @BeforeAll static void init() { - ORACLE_DB = new OracleContainer("epiclabs/docker-oracle-xe-11g"); - ORACLE_DB.start(); - } - - @BeforeEach - public void setup() throws Exception { TABLE_NAME = "ID_AND_NAME"; COL_ID = "ID"; COL_NAME = "NAME"; COL_ID_TYPE = "NUMBER(38,0)"; INSERT_STATEMENT = "INTO id_and_name (id, name) VALUES (%s,'picard-%s')"; + ORACLE_DB = new OracleContainer("epiclabs/docker-oracle-xe-11g"); + ORACLE_DB.start(); + } + + @BeforeEach + public void setup() throws Exception { config = Jsons.jsonNode(ImmutableMap.builder() .put("host", ORACLE_DB.getHost()) .put("port", ORACLE_DB.getFirstMappedPort()) @@ -57,7 +57,6 @@ public void setup() throws Exception { .put("username", ORACLE_DB.getUsername()) .put("password", ORACLE_DB.getPassword()) .build()); - super.setup(); } diff --git a/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile index 6b6b28beb9abb3..e2fdc14b44d95a 100644 --- a/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-postgres-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.11 +LABEL io.airbyte.version=0.4.12 LABEL 
io.airbyte.name=airbyte/source-postgres-strict-encrypt diff --git a/airbyte-integrations/connectors/source-postgres/Dockerfile b/airbyte-integrations/connectors/source-postgres/Dockerfile index 6e5cc0d203778b..9ae29ae6eb2250 100644 --- a/airbyte-integrations/connectors/source-postgres/Dockerfile +++ b/airbyte-integrations/connectors/source-postgres/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-postgres COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.4.11 +LABEL io.airbyte.version=0.4.12 LABEL io.airbyte.name=airbyte/source-postgres diff --git a/airbyte-integrations/connectors/source-redshift/Dockerfile b/airbyte-integrations/connectors/source-redshift/Dockerfile index af8cc61f418734..a743455d2227c1 100644 --- a/airbyte-integrations/connectors/source-redshift/Dockerfile +++ b/airbyte-integrations/connectors/source-redshift/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-redshift COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.3.9 +LABEL io.airbyte.version=0.3.10 LABEL io.airbyte.name=airbyte/source-redshift diff --git a/airbyte-integrations/connectors/source-scaffold-java-jdbc/src/main/java/io/airbyte/integrations/source/scaffold_java_jdbc/ScaffoldJavaJdbcSource.java b/airbyte-integrations/connectors/source-scaffold-java-jdbc/src/main/java/io/airbyte/integrations/source/scaffold_java_jdbc/ScaffoldJavaJdbcSource.java index 1f8e82d00496f7..e2be871f2000e7 100644 --- a/airbyte-integrations/connectors/source-scaffold-java-jdbc/src/main/java/io/airbyte/integrations/source/scaffold_java_jdbc/ScaffoldJavaJdbcSource.java +++ b/airbyte-integrations/connectors/source-scaffold-java-jdbc/src/main/java/io/airbyte/integrations/source/scaffold_java_jdbc/ScaffoldJavaJdbcSource.java @@ -6,7 +6,7 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.db.jdbc.JdbcUtils; -import io.airbyte.db.jdbc.streaming.NoOpStreamingQueryConfig; +import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; @@ -23,9 +23,9 @@ public class ScaffoldJavaJdbcSource extends AbstractJdbcSource impleme static final String DRIVER_CLASS = "driver_name_here"; public ScaffoldJavaJdbcSource() { - // By default, NoOpStreamingQueryConfig class is used. If the JDBC supports custom - // fetch size, change it to AdaptiveStreamingQueryConfig for better performance. - super(DRIVER_CLASS, NoOpStreamingQueryConfig::new, JdbcUtils.getDefaultSourceOperations()); + // TODO: if the JDBC driver does not support custom fetch size, use NoOpStreamingQueryConfig + // instead of AdaptiveStreamingQueryConfig. 
+ super(DRIVER_CLASS, AdaptiveStreamingQueryConfig::new, JdbcUtils.getDefaultSourceOperations()); } // TODO The config is based on spec.json, update according to your DB diff --git a/airbyte-integrations/connectors/source-snowflake/Dockerfile b/airbyte-integrations/connectors/source-snowflake/Dockerfile index f2d1461977b569..7d904bc10687fd 100644 --- a/airbyte-integrations/connectors/source-snowflake/Dockerfile +++ b/airbyte-integrations/connectors/source-snowflake/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-snowflake COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.11 +LABEL io.airbyte.version=0.1.12 LABEL io.airbyte.name=airbyte/source-snowflake diff --git a/airbyte-integrations/connectors/source-tidb/Dockerfile b/airbyte-integrations/connectors/source-tidb/Dockerfile index 6cd5e75bcfbe12..6179f1f2b654b9 100755 --- a/airbyte-integrations/connectors/source-tidb/Dockerfile +++ b/airbyte-integrations/connectors/source-tidb/Dockerfile @@ -17,5 +17,5 @@ ENV APPLICATION source-tidb COPY --from=build /airbyte /airbyte # Airbyte's build system uses these labels to know what to name and tag the docker images produced by this Dockerfile. -LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.version=0.1.1 LABEL io.airbyte.name=airbyte/source-tidb diff --git a/airbyte-integrations/connectors/source-tidb/src/main/java/io/airbyte/integrations/source/tidb/TiDBSource.java b/airbyte-integrations/connectors/source-tidb/src/main/java/io/airbyte/integrations/source/tidb/TiDBSource.java index 370236b8d69172..f10efc8b6e163f 100644 --- a/airbyte-integrations/connectors/source-tidb/src/main/java/io/airbyte/integrations/source/tidb/TiDBSource.java +++ b/airbyte-integrations/connectors/source-tidb/src/main/java/io/airbyte/integrations/source/tidb/TiDBSource.java @@ -8,7 +8,7 @@ import com.google.common.collect.ImmutableMap; import com.mysql.cj.MysqlType; import io.airbyte.commons.json.Jsons; -import io.airbyte.db.jdbc.streaming.NoOpStreamingQueryConfig; +import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.base.ssh.SshWrappedSource; @@ -33,7 +33,7 @@ public static Source sshWrappedSource() { } public TiDBSource() { - super(DRIVER_CLASS, NoOpStreamingQueryConfig::new, new TiDBSourceOperations()); + super(DRIVER_CLASS, AdaptiveStreamingQueryConfig::new, new TiDBSourceOperations()); } @Override diff --git a/docs/integrations/sources/cockroachdb.md b/docs/integrations/sources/cockroachdb.md index d31a5571f4cba9..3de1b54f3b0f3f 100644 --- a/docs/integrations/sources/cockroachdb.md +++ b/docs/integrations/sources/cockroachdb.md @@ -95,6 +95,7 @@ Your database user should now be ready for use with Airbyte. | Version | Date | Pull Request | Subject | |:--------|:-----------| :--- | :--- | +| 0.1.12 | 2022-04-29 | [12480](https://github.com/airbytehq/airbyte/pull/12480) | Query tables with adaptive fetch size to optimize JDBC memory consumption | | 0.1.11 | 2022-04-06 | [11729](https://github.com/airbytehq/airbyte/pull/11729) | Bump mina-sshd from 2.7.0 to 2.8.0 | | 0.1.10 | 2022-02-24 | [10235](https://github.com/airbytehq/airbyte/pull/10235) | Fix Replication Failure due Multiple portal opens | | 0.1.9 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. Now connectors work with both formats | @@ -110,6 +111,7 @@ Your database user should now be ready for use with Airbyte. 
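Several hunks above (the scaffold source, TiDB, and the related version bumps) swap `NoOpStreamingQueryConfig` for `AdaptiveStreamingQueryConfig`, so sources derive the JDBC fetch size from observed row sizes instead of streaming with a fixed batch size. A minimal sketch of that idea follows — the class, method names, and constants are illustrative assumptions, not Airbyte's actual `AdaptiveStreamingQueryConfig` internals:

```java
import java.sql.ResultSet;
import java.sql.SQLException;

// Hypothetical sketch: size fetch batches so roughly TARGET_BUFFER_BYTES of rows
// are buffered at a time, clamped to a sane range.
public final class AdaptiveFetchSizeSketch {

  private static final long TARGET_BUFFER_BYTES = 200L * 1024 * 1024;
  private static final int MIN_FETCH_SIZE = 10;
  private static final int MAX_FETCH_SIZE = 100_000;

  private double meanRowBytes = 0.0;
  private long rowsSeen = 0;

  // Maintain a running mean of serialized row sizes.
  public void observeRow(final long rowBytes) {
    rowsSeen++;
    meanRowBytes += (rowBytes - meanRowBytes) / rowsSeen;
  }

  // Re-derive the fetch size and hand it to the driver mid-stream.
  public void applyTo(final ResultSet resultSet) throws SQLException {
    if (meanRowBytes > 0) {
      final long proposed = (long) (TARGET_BUFFER_BYTES / meanRowBytes);
      resultSet.setFetchSize((int) Math.max(MIN_FETCH_SIZE, Math.min(MAX_FETCH_SIZE, proposed)));
    }
  }

}
```

This is also why the scaffold comment flips its default: drivers that honor `setFetchSize` benefit from the adaptive config, while drivers that ignore it should keep `NoOpStreamingQueryConfig`.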
| Version | Date | Pull Request | Subject | |:--------| :--- | :--- | :--- | +| 0.1.12 | 2022-04-29 | [12480](https://github.com/airbytehq/airbyte/pull/12480) | Query tables with adaptive fetch size to optimize JDBC memory consumption | | 0.1.8 | 2022-04-06 | [11729](https://github.com/airbytehq/airbyte/pull/11729) | Bump mina-sshd from 2.7.0 to 2.8.0 | | 0.1.6 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. Now connectors work with both formats | | 0.1.5 | 2022-02-18 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Updated timestamp transformation with microseconds | @@ -118,4 +120,3 @@ Your database user should now be ready for use with Airbyte. | 0.1.2 | 2021-12-24 | [9004](https://github.com/airbytehq/airbyte/pull/9004) | User can see only permitted tables during discovery | | 0.1.1 | 2021-12-24 | [8958](https://github.com/airbytehq/airbyte/pull/8958) | Add support for JdbcType.ARRAY | | 0.1.0 | 2021-11-23 | [7457](https://github.com/airbytehq/airbyte/pull/7457) | CockroachDb source: Add only encrypted version for the connector | - diff --git a/docs/integrations/sources/db2.md b/docs/integrations/sources/db2.md index 173f7973d94a66..7a029ecb619ed5 100644 --- a/docs/integrations/sources/db2.md +++ b/docs/integrations/sources/db2.md @@ -62,6 +62,7 @@ You can also enter your own password for the keystore, but if you don't, the pas | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.10 | 2022-04-29 | [12480](https://github.com/airbytehq/airbyte/pull/12480) | Query tables with adaptive fetch size to optimize JDBC memory consumption | | 0.1.9 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. Now connectors work with both formats | | 0.1.8 | 2022-02-18 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Updated timestamp transformation with microseconds | | 0.1.7 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | @@ -72,4 +73,3 @@ You can also enter your own password for the keystore, but if you don't, the pas | 0.1.2 | 2021-10-25 | [7355](https://github.com/airbytehq/airbyte/pull/7355) | Added ssl support | | 0.1.1 | 2021-08-13 | [4699](https://github.com/airbytehq/airbyte/pull/4699) | Added json config validator | | 0.1.0 | 2021-06-22 | [4197](https://github.com/airbytehq/airbyte/pull/4197) | New Source: IBM DB2 | - diff --git a/docs/integrations/sources/mssql.md b/docs/integrations/sources/mssql.md index 69495aec355606..43790f189caf48 100644 --- a/docs/integrations/sources/mssql.md +++ b/docs/integrations/sources/mssql.md @@ -294,6 +294,7 @@ If you do not see a type in this list, assume that it is coerced into a string. 
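The Oracle changes earlier in this patch also pass an explicit `;` delimiter to `JdbcUtils.parseJdbcParameters(...)` and override `getJdbcParameterDelimiter()`, because Oracle NNE options such as `oracle.net.encryption_types_client=( 3DES168 )` cannot safely be split on the default `&`. A rough sketch of delimiter-aware parameter parsing — the real `JdbcUtils` signature and semantics may differ, so treat this purely as an illustration:

```java
import java.util.LinkedHashMap;
import java.util.Map;

public final class JdbcParamParsingSketch {

  // Split "k1=v1<delim>k2=v2" into an ordered map, splitting each pair on the first '=' only.
  public static Map<String, String> parse(final String params, final String delimiter) {
    final Map<String, String> result = new LinkedHashMap<>();
    if (params == null || params.isBlank()) {
      return result;
    }
    for (final String pair : params.split(delimiter)) {
      final String[] kv = pair.split("=", 2);
      if (kv.length == 2) {
        result.put(kv[0].trim(), kv[1].trim());
      }
    }
    return result;
  }

  public static void main(final String[] args) {
    // With ';' as the delimiter, the parenthesized value survives as a single parameter.
    System.out.println(parse(
        "oracle.net.encryption_client=REQUIRED;oracle.net.encryption_types_client=( 3DES168 )",
        ";"));
  }

}
```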
| Version | Date | Pull Request | Subject | |:--------|:-----------| :----------------------------------------------------- | :------------------------------------- | +| 0.3.22 | 2022-04-29 | [12480](https://github.com/airbytehq/airbyte/pull/12480) | Query tables with adaptive fetch size to optimize JDBC memory consumption | | 0.3.21 | 2022-04-11 | [11729](https://github.com/airbytehq/airbyte/pull/11729) | Bump mina-sshd from 2.7.0 to 2.8.0 | | 0.3.19 | 2022-03-31 | [11495](https://github.com/airbytehq/airbyte/pull/11495) | Adds Support to Chinese MSSQL Server Agent | | 0.3.18 | 2022-03-29 | [11010](https://github.com/airbytehq/airbyte/pull/11010) | Adds JDBC Params | @@ -326,4 +327,3 @@ If you do not see a type in this list, assume that it is coerced into a string. | 0.1.6 | 2020-12-09 | [1172](https://github.com/airbytehq/airbyte/pull/1172) | Support incremental sync | | | 0.1.5 | 2020-11-30 | [1038](https://github.com/airbytehq/airbyte/pull/1038) | Change JDBC sources to discover more than standard schemas | | | 0.1.4 | 2020-11-30 | [1046](https://github.com/airbytehq/airbyte/pull/1046) | Add connectors using an index YAML file | | - diff --git a/docs/integrations/sources/mysql.md b/docs/integrations/sources/mysql.md index e9d8298526d00c..811c26f111f986 100644 --- a/docs/integrations/sources/mysql.md +++ b/docs/integrations/sources/mysql.md @@ -185,6 +185,7 @@ If you do not see a type in this list, assume that it is coerced into a string. | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------| +| 0.5.10 | 2022-04-29 | [12480](https://github.com/airbytehq/airbyte/pull/12480) | Query tables with adaptive fetch size to optimize JDBC memory consumption | | 0.5.9 | 2022-04-06 | [11729](https://github.com/airbytehq/airbyte/pull/11729) | Bump mina-sshd from 2.7.0 to 2.8.0 | | 0.5.6 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. Now connectors work with both formats | | 0.5.5 | 2022-02-18 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Updated timestamp transformation with microseconds | diff --git a/docs/integrations/sources/oracle.md b/docs/integrations/sources/oracle.md index 0b2432879f37f1..031e55def8e218 100644 --- a/docs/integrations/sources/oracle.md +++ b/docs/integrations/sources/oracle.md @@ -132,6 +132,7 @@ Airbyte has the ability to connect to the Oracle source with 3 network connectiv | Version | Date | Pull Request | Subject | |:--------| :--- | :--- |:------------------------------------------------| +| 0.3.15 | 2022-04-29 | [12480](https://github.com/airbytehq/airbyte/pull/12480) | Query tables with adaptive fetch size to optimize JDBC memory consumption | | 0.3.14 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. 
Now connectors work with both formats | | 0.3.13 | 2022-02-18 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Updated timestamp transformation with microseconds | | 0.3.12 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | @@ -145,4 +146,3 @@ Airbyte has the ability to connect to the Oracle source with 3 network connectiv | 0.3.4 | 2021-09-01 | [6038](https://github.com/airbytehq/airbyte/pull/6038) | Remove automatic filtering of system schemas. | | 0.3.3 | 2021-09-01 | [5779](https://github.com/airbytehq/airbyte/pull/5779) | Ability to only discover certain schemas. | | 0.3.2 | 2021-08-13 | [4699](https://github.com/airbytehq/airbyte/pull/4699) | Added json config validator. | - diff --git a/docs/integrations/sources/postgres.md b/docs/integrations/sources/postgres.md index 6b349289c48003..f5220081c57f5c 100644 --- a/docs/integrations/sources/postgres.md +++ b/docs/integrations/sources/postgres.md @@ -270,6 +270,7 @@ According to Postgres [documentation](https://www.postgresql.org/docs/14/datatyp | Version | Date | Pull Request | Subject | |:--------|:-----------|:-------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------| +| 0.4.12 | 2022-04-29 | [12480](https://github.com/airbytehq/airbyte/pull/12480) | Query tables with adaptive fetch size to optimize JDBC memory consumption | | 0.4.11 | 2022-04-11 | [11729](https://github.com/airbytehq/airbyte/pull/11729) | Bump mina-sshd from 2.7.0 to 2.8.0 | | 0.4.10 | 2022-04-08 | [11798](https://github.com/airbytehq/airbyte/pull/11798) | Fixed roles for fetching materialized view processing | | 0.4.8 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. Now connectors work with both formats | @@ -313,4 +314,3 @@ According to Postgres [documentation](https://www.postgresql.org/docs/14/datatyp | 0.1.6 | 2020-12-09 | [1172](https://github.com/airbytehq/airbyte/pull/1172) | Support incremental sync | | 0.1.5 | 2020-11-30 | [1038](https://github.com/airbytehq/airbyte/pull/1038) | Change JDBC sources to discover more than standard schemas | | 0.1.4 | 2020-11-30 | [1046](https://github.com/airbytehq/airbyte/pull/1046) | Add connectors using an index YAML file | - diff --git a/docs/integrations/sources/redshift.md b/docs/integrations/sources/redshift.md index 34ca674590756a..33bd04918bc269 100644 --- a/docs/integrations/sources/redshift.md +++ b/docs/integrations/sources/redshift.md @@ -54,6 +54,7 @@ All Redshift connections are encrypted using SSL | Version | Date | Pull Request | Subject | | :------ | :-------- | :----- | :------ | +| 0.3.10 | 2022-04-29 | [12480](https://github.com/airbytehq/airbyte/pull/12480) | Query tables with adaptive fetch size to optimize JDBC memory consumption | | 0.3.9 | 2022-02-21 | [9744](https://github.com/airbytehq/airbyte/pull/9744) | List only the tables on which the user has SELECT permissions. 
| 0.3.8 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | | 0.3.7 | 2022-01-26 | [9721](https://github.com/airbytehq/airbyte/pull/9721) | Added schema selection | diff --git a/docs/integrations/sources/snowflake.md b/docs/integrations/sources/snowflake.md index f500081c8b2a63..ddeca9ac1d0352 100644 --- a/docs/integrations/sources/snowflake.md +++ b/docs/integrations/sources/snowflake.md @@ -103,6 +103,7 @@ Field | Description | | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.12 | 2022-04-29 | [12480](https://github.com/airbytehq/airbyte/pull/12480) | Query tables with adaptive fetch size to optimize JDBC memory consumption | | 0.1.11 | 2022-04-27 | [10953](https://github.com/airbytehq/airbyte/pull/10953) | Implement OAuth flow | | 0.1.9 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. Now connectors work with both formats | | 0.1.8 | 2022-02-18 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Updated timestamp transformation with microseconds | diff --git a/docs/integrations/sources/tidb.md b/docs/integrations/sources/tidb.md index 93e6864d77be30..5cea0d66f7331b 100644 --- a/docs/integrations/sources/tidb.md +++ b/docs/integrations/sources/tidb.md @@ -120,4 +120,5 @@ Using this feature requires additional configuration, when creating the source. | Version | Date | Pull Request | Subject | | :------ | :--- | :----------- | ------- | +| 0.1.1 | 2022-04-29 | [12480](https://github.com/airbytehq/airbyte/pull/12480) | Query tables with adaptive fetch size to optimize JDBC memory consumption | | 0.1.0 | 2022-04-19 | [11283](https://github.com/airbytehq/airbyte/pull/11283) | Initial Release | From 18f59bfc9a7cf8364f12c0eb2e74c3346307b078 Mon Sep 17 00:00:00 2001 From: LiRen Tu Date: Fri, 29 Apr 2022 23:45:42 -0700 Subject: [PATCH 042/152] Add object methods for json schema type (#12493) --- .../source/jdbc/AbstractJdbcSource.java | 2 +- .../protocol/models/JsonSchemaType.java | 28 +++++++++++++++++-- 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java b/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java index d7ee2d9380ee3a..7bc9111ef7b5f7 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java +++ b/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java @@ -135,7 +135,7 @@ protected List>> discoverInternal(final JdbcData .map(f -> { final Datatype datatype = getFieldType(f); final JsonSchemaType jsonType = getType(datatype); - LOGGER.info("Table {} column {} (type {}[{}]) -> Json type {}", + LOGGER.info("Table {} column {} (type {}[{}]) -> {}", fields.get(0).get(INTERNAL_TABLE_NAME).asText(), f.get(INTERNAL_COLUMN_NAME).asText(), f.get(INTERNAL_COLUMN_TYPE_NAME).asText(), diff --git a/airbyte-protocol/models/src/main/java/io/airbyte/protocol/models/JsonSchemaType.java b/airbyte-protocol/models/src/main/java/io/airbyte/protocol/models/JsonSchemaType.java index f9d2bf5b0492d0..9d063088cf5a9a 100644 --- a/airbyte-protocol/models/src/main/java/io/airbyte/protocol/models/JsonSchemaType.java +++ 
b/airbyte-protocol/models/src/main/java/io/airbyte/protocol/models/JsonSchemaType.java @@ -6,6 +6,7 @@ import com.google.common.collect.ImmutableMap; import java.util.Map; +import java.util.Objects; public class JsonSchemaType { @@ -51,17 +52,17 @@ private Builder(final JsonSchemaPrimitive type) { typeMapBuilder.put(TYPE, type.name().toLowerCase()); } - public Builder withFormat(String value) { + public Builder withFormat(final String value) { typeMapBuilder.put(FORMAT, value); return this; } - public Builder withContentEncoding(String value) { + public Builder withContentEncoding(final String value) { typeMapBuilder.put(CONTENT_ENCODING, value); return this; } - public Builder withAirbyteType(String value) { + public Builder withAirbyteType(final String value) { typeMapBuilder.put(AIRBYTE_TYPE, value); return this; } @@ -72,4 +73,25 @@ public JsonSchemaType build() { } + @Override + public String toString() { + return String.format("JsonSchemaType(%s)", jsonSchemaTypeMap.toString()); + } + + @Override + public boolean equals(final Object other) { + if (other == null) { + return false; + } + if (!(other instanceof final JsonSchemaType that)) { + return false; + } + return Objects.equals(this.jsonSchemaTypeMap, that.jsonSchemaTypeMap); + } + + @Override + public int hashCode() { + return Objects.hashCode(this.jsonSchemaTypeMap); + } + } From 2476a79ed2369a7aa682c3d3c5d5ac4188d85142 Mon Sep 17 00:00:00 2001 From: LiRen Tu Date: Sat, 30 Apr 2022 17:22:23 -0700 Subject: [PATCH 043/152] Add build status for mysql and mssql destination (#12502) --- airbyte-integrations/builds.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/airbyte-integrations/builds.md b/airbyte-integrations/builds.md index 62bfda98ba17ff..d39bf471be5840 100644 --- a/airbyte-integrations/builds.md +++ b/airbyte-integrations/builds.md @@ -135,6 +135,8 @@ | MariaDB ColumnStore | [![destination-mariadb-columnstore](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-mariadb-columnstore%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-mariadb-columnstore) | | Mongo DB | [![destination-mongodb](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-mongodb%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-mongodb) | | MQTT | [![destination-mqtt](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-mqtt%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-mqtt) | +| MSSQL (SQL Server) | [![destination-mssql](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-mssql%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-mssql) | +| MySQL | [![destination-mysql](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-mysql%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-mysql) | | Postgres | [![destination-postgres](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-postgres%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-postgres) | | Pulsar | 
[![destination-pulsar](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-pulsar%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-pulsar) | | Redshift | [![destination-redshift](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-redshift%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-redshift) | From 4ee1fa6c5a5bcfa9e2a881e93b3d828994ba7d43 Mon Sep 17 00:00:00 2001 From: Brian Leonard Date: Sun, 1 May 2022 14:38:43 -0700 Subject: [PATCH 044/152] Use shared workflow action on issues (#12382) * Use shared workflow action on issues * Rename to token * Also on unlabeled --- .github/workflows/shared-issues.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 .github/workflows/shared-issues.yml diff --git a/.github/workflows/shared-issues.yml b/.github/workflows/shared-issues.yml new file mode 100644 index 00000000000000..e7c167af81fc09 --- /dev/null +++ b/.github/workflows/shared-issues.yml @@ -0,0 +1,13 @@ +name: "Shared Issues" +on: + issues: + types: [opened, labeled, unlabeled] + +jobs: + shared-issues: + runs-on: ubuntu-latest + steps: + - uses: airbytehq/workflow-actions@production + with: + token: "${{ secrets.OCTAVIA_PAT }}" + command: "issue" From 22cebe4b5b253fcb899fda08e1ef6576f1303b43 Mon Sep 17 00:00:00 2001 From: Daniel Stevenson Date: Mon, 2 May 2022 09:08:50 +0200 Subject: [PATCH 045/152] CDK: DRY acceptance test templates (#12067) --- .../generator/package.json | 1 + .../connector-templates/generator/plopfile.js | 73 ++++++++++++++++++- .../source-generic/acceptance-test-config.yml | 25 ------- .../acceptance-test-config.yml.hbs | 30 -------- .../acceptance-test-docker.sh | 16 ---- .../source-python/acceptance-test-docker.sh | 16 ---- .../acceptance-test-config.yml.hbs | 30 -------- .../source-singer/acceptance-test-docker.sh | 16 ---- .../acceptance-test-config.yml.hbs | 4 +- .../acceptance-test-docker.sh | 0 .../acceptance-test-config.yml | 12 +-- 11 files changed, 81 insertions(+), 142 deletions(-) delete mode 100644 airbyte-integrations/connector-templates/source-generic/acceptance-test-config.yml delete mode 100644 airbyte-integrations/connector-templates/source-python-http-api/acceptance-test-config.yml.hbs delete mode 100644 airbyte-integrations/connector-templates/source-python-http-api/acceptance-test-docker.sh delete mode 100644 airbyte-integrations/connector-templates/source-python/acceptance-test-docker.sh delete mode 100644 airbyte-integrations/connector-templates/source-singer/acceptance-test-config.yml.hbs delete mode 100644 airbyte-integrations/connector-templates/source-singer/acceptance-test-docker.sh rename airbyte-integrations/connector-templates/{source-python => source_acceptance_test_files}/acceptance-test-config.yml.hbs (92%) rename airbyte-integrations/connector-templates/{source-generic => source_acceptance_test_files}/acceptance-test-docker.sh (100%) diff --git a/airbyte-integrations/connector-templates/generator/package.json b/airbyte-integrations/connector-templates/generator/package.json index 0a4ae5ac873f17..f7bd9559da45d3 100644 --- a/airbyte-integrations/connector-templates/generator/package.json +++ b/airbyte-integrations/connector-templates/generator/package.json @@ -7,6 +7,7 @@ }, "devDependencies": { "capital-case": "^1.0.4", + "change-case": "^4.1.2", "handlebars": "^4.7.7", "plop": "^3.0.5", "set-value": ">=4.0.1", diff --git 
a/airbyte-integrations/connector-templates/generator/plopfile.js b/airbyte-integrations/connector-templates/generator/plopfile.js index 86c5f215755218..d9914238b4a460 100644 --- a/airbyte-integrations/connector-templates/generator/plopfile.js +++ b/airbyte-integrations/connector-templates/generator/plopfile.js @@ -2,7 +2,8 @@ const path = require('path'); const uuid = require('uuid'); const capitalCase = require('capital-case'); - +const changeCase = require('change-case') + const getSuccessMessage = function(connectorName, outputPath, additionalMessage){ return ` 🚀 🚀 🚀 🚀 🚀 🚀 @@ -27,6 +28,8 @@ module.exports = function (plop) { const docRoot = '../../../docs/integrations'; const definitionRoot = '../../../airbyte-config/init/src/main/resources'; + const sourceAcceptanceTestFilesInputRoot = '../source_acceptance_test_files'; + const pythonSourceInputRoot = '../source-python'; const singerSourceInputRoot = '../source-singer'; const genericSourceInputRoot = '../source-generic'; @@ -43,11 +46,40 @@ module.exports = function (plop) { const httpApiOutputRoot = `${outputDir}/source-{{dashCase name}}`; const javaDestinationOutputRoot = `${outputDir}/destination-{{dashCase name}}`; const pythonDestinationOutputRoot = `${outputDir}/destination-{{dashCase name}}`; + const sourceConnectorImagePrefix = 'airbyte/source-' + const sourceConnectorImageTag = 'dev' + const defaultSpecPathFolderPrefix = 'source_' + const specFileName = 'spec.yaml' + plop.setHelper('capitalCase', function(name) { return capitalCase.capitalCase(name); }); + plop.setHelper('connectorImage', function() { + let suffix = "" + if (typeof this.connectorImageNameSuffix !== 'undefined') { + suffix = this.connectorImageNameSuffix + } + return `${sourceConnectorImagePrefix}${changeCase.paramCase(this.name)}${suffix}:${sourceConnectorImageTag}` + }); + + plop.setHelper('specPath', function() { + let suffix = "" + if (typeof this.specPathFolderSuffix !== 'undefined') { + suffix = this.specPathFolderSuffix + } + let inSubFolder = true + if (typeof this.inSubFolder !== 'undefined') { + inSubFolder = this.inSubFolder + } + if (inSubFolder) { + return `${defaultSpecPathFolderPrefix}${changeCase.snakeCase(this.name)}${suffix}/${specFileName}` + } else { + return specFileName + } + }); + plop.setActionType('emitSuccess', function(answers, config, plopApi){ console.log(getSuccessMessage(answers.name, plopApi.renderString(config.outputPath, answers), config.message)); }); @@ -86,6 +118,14 @@ module.exports = function (plop) { base: httpApiInputRoot, templateFiles: `${httpApiInputRoot}/**/**`, }, + // common acceptance tests + { + abortOnFail: true, + type:'addMany', + destination: httpApiOutputRoot, + base: sourceAcceptanceTestFilesInputRoot, + templateFiles: `${sourceAcceptanceTestFilesInputRoot}/**/**`, + }, // plop doesn't add dotfiles by default so we manually add them { type:'add', @@ -113,6 +153,18 @@ module.exports = function (plop) { base: singerSourceInputRoot, templateFiles: `${singerSourceInputRoot}/**/**`, }, + // common acceptance tests + { + abortOnFail: true, + type:'addMany', + destination: singerSourceOutputRoot, + base: sourceAcceptanceTestFilesInputRoot, + templateFiles: `${sourceAcceptanceTestFilesInputRoot}/**/**`, + data: { + connectorImageNameSuffix: "-singer", + specPathFolderSuffix: "_singer" + } + }, { type:'add', abortOnFail: true, @@ -140,6 +192,14 @@ module.exports = function (plop) { base: pythonSourceInputRoot, templateFiles: `${pythonSourceInputRoot}/**/**`, }, + // common acceptance tests + { + abortOnFail: true, + 
type:'addMany', + destination: pythonSourceOutputRoot, + base: sourceAcceptanceTestFilesInputRoot, + templateFiles: `${sourceAcceptanceTestFilesInputRoot}/**/**`, + }, { type:'add', abortOnFail: true, @@ -175,6 +235,17 @@ module.exports = function (plop) { base: genericSourceInputRoot, templateFiles: `${genericSourceInputRoot}/**/**`, }, + // common acceptance tests + { + abortOnFail: true, + type:'addMany', + destination: genericSourceOutputRoot, + base: sourceAcceptanceTestFilesInputRoot, + templateFiles: `${sourceAcceptanceTestFilesInputRoot}/**/**`, + data: { + inSubFolder: false + } + }, {type: 'emitSuccess', outputPath: genericSourceOutputRoot} ] }); diff --git a/airbyte-integrations/connector-templates/source-generic/acceptance-test-config.yml b/airbyte-integrations/connector-templates/source-generic/acceptance-test-config.yml deleted file mode 100644 index 4d2eeb3e00e6ae..00000000000000 --- a/airbyte-integrations/connector-templates/source-generic/acceptance-test-config.yml +++ /dev/null @@ -1,25 +0,0 @@ -# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) -# for more information about how to configure these tests -connector_image: airbyte/source-{{dashCase name}}:dev -tests: - spec: - - spec_path: "spec.json" - config_path: "secrets/valid_config.json" # TODO add this file - connection: - - config_path: "secrets/valid_config.json" # TODO add this file - status: "succeed" - - config_path: "secrets/invalid_config.json" # TODO add this file - status: "failed" - discovery: - - config_path: "secrets/valid_config.json" - basic_read: - - config_path: "secrets/valid_config.json" - configured_catalog_path: "fullrefresh_configured_catalog.json" # TODO add or change this file - empty_streams: [] - full_refresh: - - config_path: "secrets/valid_config.json" - configured_catalog_path: "fullrefresh_configured_catalog.json" # TODO add or change this file -# incremental: # TODO uncomment this once you implement incremental sync -# - config_path: "secrets/config.json" -# configured_catalog_path: "integration_tests/configured_catalog.json" -# future_state_path: "integration_tests/abnormal_state.json" diff --git a/airbyte-integrations/connector-templates/source-python-http-api/acceptance-test-config.yml.hbs b/airbyte-integrations/connector-templates/source-python-http-api/acceptance-test-config.yml.hbs deleted file mode 100644 index 49acab015f3f86..00000000000000 --- a/airbyte-integrations/connector-templates/source-python-http-api/acceptance-test-config.yml.hbs +++ /dev/null @@ -1,30 +0,0 @@ -# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) -# for more information about how to configure these tests -connector_image: airbyte/source-{{dashCase name}}:dev -tests: - spec: - - spec_path: "source_{{snakeCase name}}/spec.yaml" - connection: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "failed" - discovery: - - config_path: "secrets/config.json" - basic_read: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - empty_streams: [] -# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file -# expect_records: -# path: "integration_tests/expected_records.txt" -# extra_fields: no -# exact_order: no -# extra_records: yes - incremental: # 
TODO if your connector does not implement incremental sync, remove this block - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - future_state_path: "integration_tests/abnormal_state.json" - full_refresh: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connector-templates/source-python-http-api/acceptance-test-docker.sh b/airbyte-integrations/connector-templates/source-python-http-api/acceptance-test-docker.sh deleted file mode 100644 index c51577d10690c1..00000000000000 --- a/airbyte-integrations/connector-templates/source-python-http-api/acceptance-test-docker.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env sh - -# Build latest connector image -docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) - -# Pull latest acctest image -docker pull airbyte/source-acceptance-test:latest - -# Run -docker run --rm -it \ - -v /var/run/docker.sock:/var/run/docker.sock \ - -v /tmp:/tmp \ - -v $(pwd):/test_input \ - airbyte/source-acceptance-test \ - --acceptance-test-config /test_input - diff --git a/airbyte-integrations/connector-templates/source-python/acceptance-test-docker.sh b/airbyte-integrations/connector-templates/source-python/acceptance-test-docker.sh deleted file mode 100644 index c51577d10690c1..00000000000000 --- a/airbyte-integrations/connector-templates/source-python/acceptance-test-docker.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env sh - -# Build latest connector image -docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) - -# Pull latest acctest image -docker pull airbyte/source-acceptance-test:latest - -# Run -docker run --rm -it \ - -v /var/run/docker.sock:/var/run/docker.sock \ - -v /tmp:/tmp \ - -v $(pwd):/test_input \ - airbyte/source-acceptance-test \ - --acceptance-test-config /test_input - diff --git a/airbyte-integrations/connector-templates/source-singer/acceptance-test-config.yml.hbs b/airbyte-integrations/connector-templates/source-singer/acceptance-test-config.yml.hbs deleted file mode 100644 index f485a8c6460ddb..00000000000000 --- a/airbyte-integrations/connector-templates/source-singer/acceptance-test-config.yml.hbs +++ /dev/null @@ -1,30 +0,0 @@ -# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) -# for more information about how to configure these tests -connector_image: airbyte/source-{{dashCase name}}-singer:dev -tests: - spec: - - spec_path: "source_{{snakeCase name}}_singer/spec.yaml" - connection: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "exception" - discovery: - - config_path: "secrets/config.json" - basic_read: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - empty_streams: [] -# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file -# expect_records: -# path: "integration_tests/expected_records.txt" -# extra_fields: no -# exact_order: no -# extra_records: yes - incremental: # TODO if your connector does not implement incremental sync, remove this block - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - future_state_path: 
"integration_tests/abnormal_state.json" - full_refresh: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connector-templates/source-singer/acceptance-test-docker.sh b/airbyte-integrations/connector-templates/source-singer/acceptance-test-docker.sh deleted file mode 100644 index e4d8b1cef8961e..00000000000000 --- a/airbyte-integrations/connector-templates/source-singer/acceptance-test-docker.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env sh - -# Build latest connector image -docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2) - -# Pull latest acctest image -docker pull airbyte/source-acceptance-test:latest - -# Run -docker run --rm -it \ - -v /var/run/docker.sock:/var/run/docker.sock \ - -v /tmp:/tmp \ - -v $(pwd):/test_input \ - airbyte/source-acceptance-test \ - --acceptance-test-config /test_input - diff --git a/airbyte-integrations/connector-templates/source-python/acceptance-test-config.yml.hbs b/airbyte-integrations/connector-templates/source_acceptance_test_files/acceptance-test-config.yml.hbs similarity index 92% rename from airbyte-integrations/connector-templates/source-python/acceptance-test-config.yml.hbs rename to airbyte-integrations/connector-templates/source_acceptance_test_files/acceptance-test-config.yml.hbs index 2ec2ab2694f8f0..3981383e5d7657 100644 --- a/airbyte-integrations/connector-templates/source-python/acceptance-test-config.yml.hbs +++ b/airbyte-integrations/connector-templates/source_acceptance_test_files/acceptance-test-config.yml.hbs @@ -1,9 +1,9 @@ # See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) # for more information about how to configure these tests -connector_image: airbyte/source-{{dashCase name}}:dev +connector_image: {{ connectorImage }} tests: spec: - - spec_path: "source_{{snakeCase name}}/spec.yaml" + - spec_path: "{{ specPath }}" connection: - config_path: "secrets/config.json" status: "succeed" diff --git a/airbyte-integrations/connector-templates/source-generic/acceptance-test-docker.sh b/airbyte-integrations/connector-templates/source_acceptance_test_files/acceptance-test-docker.sh similarity index 100% rename from airbyte-integrations/connector-templates/source-generic/acceptance-test-docker.sh rename to airbyte-integrations/connector-templates/source_acceptance_test_files/acceptance-test-docker.sh diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/acceptance-test-config.yml b/airbyte-integrations/connectors/source-scaffold-source-http/acceptance-test-config.yml index a625390b4d5eb6..97cf9c7d8e1cd9 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-http/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-scaffold-source-http/acceptance-test-config.yml @@ -15,12 +15,12 @@ tests: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" empty_streams: [] -# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file -# expect_records: -# path: "integration_tests/expected_records.txt" -# extra_fields: no -# exact_order: no -# extra_records: yes + # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file + # expect_records: + # path: 
"integration_tests/expected_records.txt" + # extra_fields: no + # exact_order: no + # extra_records: yes incremental: # TODO if your connector does not implement incremental sync, remove this block - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" From 407c06c016ee089f64ad84327d15079524b8dfc1 Mon Sep 17 00:00:00 2001 From: Edmundo Ruiz Ghanem <168664+edmundito@users.noreply.github.com> Date: Mon, 2 May 2022 09:45:38 -0400 Subject: [PATCH 046/152] Fix CircleLoader component svg props (#12475) --- airbyte-webapp/src/components/StatusIcon/CircleLoader.tsx | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/airbyte-webapp/src/components/StatusIcon/CircleLoader.tsx b/airbyte-webapp/src/components/StatusIcon/CircleLoader.tsx index 20cc81a4fa794e..6cf80c9e2c9399 100644 --- a/airbyte-webapp/src/components/StatusIcon/CircleLoader.tsx +++ b/airbyte-webapp/src/components/StatusIcon/CircleLoader.tsx @@ -47,15 +47,15 @@ const CircleLoader = ({ title }: Props): JSX.Element => ( From 764b7d9df6e91068e3e7a5656b3d5e477dce4bf1 Mon Sep 17 00:00:00 2001 From: Edmundo Ruiz Ghanem <168664+edmundito@users.noreply.github.com> Date: Mon, 2 May 2022 09:46:25 -0400 Subject: [PATCH 047/152] Update Input password component layout to avoid password manager button overlap with visibility button (#12398) * Update Input password component visibilty button * Update layout to prevent password manager buttons from overlapping with visiblity button * Apply consistent layout to both password and non-password inputs * Add Input component unit test * Update InputProps to interface * Ensure input component can be assigned type * Add aria label to visiblity button in password input * Fix type issues with testutils render --- .../src/components/base/Input/Input.test.tsx | 44 +++++++ .../src/components/base/Input/Input.tsx | 110 +++++++++++------- airbyte-webapp/src/locales/en.json | 2 + airbyte-webapp/src/utils/testutils.tsx | 23 ++-- 4 files changed, 123 insertions(+), 56 deletions(-) create mode 100644 airbyte-webapp/src/components/base/Input/Input.test.tsx diff --git a/airbyte-webapp/src/components/base/Input/Input.test.tsx b/airbyte-webapp/src/components/base/Input/Input.test.tsx new file mode 100644 index 00000000000000..d411053912237a --- /dev/null +++ b/airbyte-webapp/src/components/base/Input/Input.test.tsx @@ -0,0 +1,44 @@ +import { render } from "utils/testutils"; + +import { Input } from "./Input"; + +describe("", () => { + test("renders text input", async () => { + const value = "aribyte@example.com"; + const { getByTestId, queryByTestId } = await render(); + + expect(getByTestId("input")).toHaveAttribute("type", "text"); + expect(getByTestId("input")).toHaveValue(value); + expect(queryByTestId("toggle-password-visibility-button")).toBeFalsy(); + }); + + test("renders another type of input", async () => { + const type = "number"; + const value = 888; + const { getByTestId, queryByTestId } = await render(); + + expect(getByTestId("input")).toHaveAttribute("type", type); + expect(getByTestId("input")).toHaveValue(value); + expect(queryByTestId("toggle-password-visibility-button")).toBeFalsy(); + }); + + test("renders password input with visibilty button", async () => { + const value = "eight888"; + const { getByTestId, getByRole } = await render(); + + expect(getByTestId("input")).toHaveAttribute("type", "password"); + expect(getByTestId("input")).toHaveValue(value); + expect(getByRole("img", { hidden: true })).toHaveAttribute("data-icon", 
"eye"); + }); + + test("renders visible password when visibility button is clicked", async () => { + const value = "eight888"; + const { getByTestId, getByRole } = await render(); + + getByTestId("toggle-password-visibility-button")?.click(); + + expect(getByTestId("input")).toHaveAttribute("type", "text"); + expect(getByTestId("input")).toHaveValue(value); + expect(getByRole("img", { hidden: true })).toHaveAttribute("data-icon", "eye-slash"); + }); +}); diff --git a/airbyte-webapp/src/components/base/Input/Input.tsx b/airbyte-webapp/src/components/base/Input/Input.tsx index 796d12fa880cc9..117ff76e206a51 100644 --- a/airbyte-webapp/src/components/base/Input/Input.tsx +++ b/airbyte-webapp/src/components/base/Input/Input.tsx @@ -1,6 +1,8 @@ import { faEye, faEyeSlash } from "@fortawesome/free-regular-svg-icons"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; -import React, { useState } from "react"; +import React from "react"; +import { useIntl } from "react-intl"; +import { useToggle } from "react-use"; import styled from "styled-components"; import { Theme } from "theme"; @@ -18,37 +20,44 @@ const getBackgroundColor = (props: IStyleProps) => { return props.theme.greyColor0; }; -export type InputProps = { +export interface InputProps extends React.InputHTMLAttributes { error?: boolean; light?: boolean; -} & React.InputHTMLAttributes; +} -const InputComponent = styled.input` - outline: none; +const InputContainer = styled.div` width: 100%; - padding: 7px 18px 7px 8px; - border-radius: 4px; - font-size: 14px; - line-height: 20px; - font-weight: normal; - border: 1px solid ${(props) => (props.error ? props.theme.dangerColor : props.theme.greyColor0)}; + position: relative; background: ${(props) => getBackgroundColor(props)}; - color: ${({ theme }) => theme.textColor}; - caret-color: ${({ theme }) => theme.primaryColor}; - - &::placeholder { - color: ${({ theme }) => theme.greyColor40}; - } + border: 1px solid ${(props) => (props.error ? props.theme.dangerColor : props.theme.greyColor0)}; + border-radius: 4px; &:hover { background: ${({ theme, light }) => (light ? theme.whiteColor : theme.greyColor20)}; border-color: ${(props) => (props.error ? props.theme.dangerColor : props.theme.greyColor20)}; } - &:focus { + &.input-container--focused { background: ${({ theme, light }) => (light ? theme.whiteColor : theme.primaryColor12)}; border-color: ${({ theme }) => theme.primaryColor}; } +`; + +const InputComponent = styled.input` + outline: none; + width: ${({ isPassword }) => (isPassword ? "calc(100% - 22px)" : "100%")}; + padding: 7px 8px 7px 8px; + font-size: 14px; + line-height: 20px; + font-weight: normal; + border: none; + background: none; + color: ${({ theme }) => theme.textColor}; + caret-color: ${({ theme }) => theme.primaryColor}; + + &::placeholder { + color: ${({ theme }) => theme.greyColor40}; + } &:disabled { pointer-events: none; @@ -56,34 +65,53 @@ const InputComponent = styled.input` } `; -const Container = styled.div` - width: 100%; - position: relative; -`; - const VisibilityButton = styled(Button)` position: absolute; - right: 2px; - top: 7px; + right: 0px; + top: 0; + display: flex; + height: 100%; + width: 30px; + align-items: center; + justify-content: center; + border: none; `; const Input: React.FC = (props) => { - const [isContentVisible, setIsContentVisible] = useState(false); - - if (props.type === "password") { - return ( - - - {props.disabled ? 
null : ( - setIsContentVisible(!isContentVisible)} type="button"> - - - )} - - ); - } - - return ; + const { formatMessage } = useIntl(); + const [isContentVisible, setIsContentVisible] = useToggle(false); + const [focused, toggleFocused] = useToggle(false); + + const isPassword = props.type === "password"; + const isVisibilityButtonVisible = isPassword && !props.disabled; + const type = isPassword ? (isContentVisible ? "text" : "password") : props.type; + const onInputFocusChange = () => toggleFocused(); + + return ( + + + {isVisibilityButtonVisible ? ( + setIsContentVisible()} + type="button" + aria-label={formatMessage({ + id: `ui.input.${isContentVisible ? "hide" : "show"}Password`, + })} + data-testid="toggle-password-visibility-button" + > + + + ) : null} + + ); }; export default Input; diff --git a/airbyte-webapp/src/locales/en.json b/airbyte-webapp/src/locales/en.json index 7e6de2b13dae93..76c5831e364b51 100644 --- a/airbyte-webapp/src/locales/en.json +++ b/airbyte-webapp/src/locales/en.json @@ -490,6 +490,8 @@ "errorView.notFound": "Resource not found", "errorView.unknown": "Unknown", + "ui.input.showPassword": "Show password", + "ui.input.hidePassword": "Hide password", "ui.keyValuePair": "{key}: {value}", "ui.keyValuePairV2": "{key} ({value})", "ui.keyValuePairV3": "{key}, {value}", diff --git a/airbyte-webapp/src/utils/testutils.tsx b/airbyte-webapp/src/utils/testutils.tsx index ab6a2f354d14c3..9f9e50e70fc903 100644 --- a/airbyte-webapp/src/utils/testutils.tsx +++ b/airbyte-webapp/src/utils/testutils.tsx @@ -1,5 +1,4 @@ -import { act, Queries, render as rtlRender, RenderResult } from "@testing-library/react"; -import { History } from "history"; +import { act, Queries, queries, render as rtlRender, RenderOptions, RenderResult } from "@testing-library/react"; import React from "react"; import { IntlProvider } from "react-intl"; import { MemoryRouter } from "react-router-dom"; @@ -9,20 +8,14 @@ import { configContext, defaultConfig } from "config"; import { FeatureService } from "hooks/services/Feature"; import en from "locales/en.json"; -export type RenderOptions = { - // optionally pass in a history object to control routes in the test - history?: History; - container?: HTMLElement; -}; - type WrapperProps = { - children?: React.ReactNode; + children?: React.ReactElement; }; -export async function render( - ui: React.ReactNode, - renderOptions?: RenderOptions -): Promise> { +export async function render< + Q extends Queries = typeof queries, + Container extends Element | DocumentFragment = HTMLElement +>(ui: React.ReactNode, renderOptions?: RenderOptions): Promise> { function Wrapper({ children }: WrapperProps) { return ( @@ -35,9 +28,9 @@ export async function render( ); } - let renderResult: RenderResult; + let renderResult: RenderResult; await act(async () => { - renderResult = await rtlRender(
<div>{ui}</div>
, { wrapper: Wrapper, ...renderOptions }); + renderResult = await rtlRender(
<div>{ui}</div>
, { wrapper: Wrapper, ...renderOptions }); }); return renderResult!; From 915573e8989fe18f47a527500cdc92c1ca606c6e Mon Sep 17 00:00:00 2001 From: midavadim Date: Mon, 2 May 2022 19:37:05 +0300 Subject: [PATCH 048/152] :tada: Source Tiktok Marketing - updated specs description (#12435) * updated seed files * updated change history * updated docs * updated connector version * auto-bump connector version * updated source specs Co-authored-by: Octavia Squidington III --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 19 ++++++++--------- .../source-tiktok-marketing/Dockerfile | 2 +- .../integration_tests/spec.json | 21 ++++++++----------- .../source_tiktok_marketing/spec.py | 13 ++++++------ docs/integrations/sources/tiktok-marketing.md | 5 +++-- 6 files changed, 29 insertions(+), 33 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 2da12aedfb702f..fd9121924be3f1 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -799,7 +799,7 @@ - name: TikTok Marketing sourceDefinitionId: 4bfac00d-ce15-44ff-95b9-9e3c3e8fbd35 dockerRepository: airbyte/source-tiktok-marketing - dockerImageTag: 0.1.7 + dockerImageTag: 0.1.8 documentationUrl: https://docs.airbyte.io/integrations/sources/tiktok-marketing icon: tiktok.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 95d0bd48606802..65386e30285c6c 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -8666,7 +8666,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-tiktok-marketing:0.1.7" +- dockerImage: "airbyte/source-tiktok-marketing:0.1.8" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/tiktok-marketing" changelogUrl: "https://docs.airbyte.io/integrations/sources/tiktok-marketing" @@ -8700,7 +8700,7 @@ type: "string" access_token: title: "Access Token" - description: "Long-term Authorized Access Token." + description: "The long-term authorized access token." airbyte_secret: true type: "string" required: @@ -8726,7 +8726,7 @@ type: "string" access_token: title: "Access Token" - description: "The Long-term Authorized Access Token." + description: "The long-term authorized access token." airbyte_secret: true type: "string" required: @@ -8743,12 +8743,12 @@ type: "string" advertiser_id: title: "Advertiser ID" - description: "The Advertiser ID which generated for the developer's\ + description: "The Advertiser ID which generated for the developer's\ \ Sandbox application." type: "string" access_token: title: "Access Token" - description: "The Long-term Authorized Access Token." + description: "The long-term authorized access token." airbyte_secret: true type: "string" required: @@ -8757,17 +8757,16 @@ start_date: title: "Start Date *" description: "The Start Date in format: YYYY-MM-DD. Any data before this\ - \ date will not be replicated. If this parameter is not set, all data\ - \ will be replicated." + \ date will not be replicated.If this parameter is not set, all data will\ + \ be replicated." 
default: "2016-09-01" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" order: 1 type: "string" report_granularity: title: "Report Granularity *" - description: "Which time granularity should be grouped by; for LIFETIME\ - \ there will be no grouping. This option is used for reports' streams\ - \ only." + description: "Grouping of your reports based on time. Lifetime will have\ + \ no grouping. This option is used for reports' streams only." default: "DAY" enum: - "LIFETIME" diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile b/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile index 5524acd9717b7c..d9fcada3aa5d46 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile +++ b/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile @@ -32,5 +32,5 @@ COPY source_tiktok_marketing ./source_tiktok_marketing ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.7 +LABEL io.airbyte.version=0.1.8 LABEL io.airbyte.name=airbyte/source-tiktok-marketing diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json b/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json index da6cad26a536c7..e008d0a54887e5 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json @@ -10,8 +10,7 @@ "default": {}, "order": 0, "type": "object", - "oneOf": [ - { + "oneOf": [{ "title": "OAuth2.0", "type": "object", "properties": { @@ -35,14 +34,13 @@ }, "access_token": { "title": "Access Token", - "description": "Long-term Authorized Access Token.", + "description": "The long-term authorized access token.", "airbyte_secret": true, "type": "string" } }, "required": ["app_id", "secret", "access_token"] - }, - { + }, { "title": "Production Access Token", "type": "object", "properties": { @@ -65,14 +63,13 @@ }, "access_token": { "title": "Access Token", - "description": "The Long-term Authorized Access Token.", + "description": "The long-term authorized access token.", "airbyte_secret": true, "type": "string" } }, "required": ["app_id", "secret", "access_token"] - }, - { + }, { "title": "Sandbox Access Token", "type": "object", "properties": { @@ -84,12 +81,12 @@ }, "advertiser_id": { "title": "Advertiser ID", - "description": "The Advertiser ID which generated for the developer's Sandbox application.", + "description": "The Advertiser ID which generated for the developer's Sandbox application.", "type": "string" }, "access_token": { "title": "Access Token", - "description": "The Long-term Authorized Access Token.", + "description": "The long-term authorized access token.", "airbyte_secret": true, "type": "string" } @@ -100,7 +97,7 @@ }, "start_date": { "title": "Start Date *", - "description": "The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.", + "description": "The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated.If this parameter is not set, all data will be replicated.", "default": "2016-09-01", "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$", "order": 1, @@ -108,7 +105,7 @@ }, "report_granularity": { "title": "Report Granularity *", - "description": "Which time granularity should be grouped by; for LIFETIME there will be no grouping. 
This option is used for reports' streams only.", + "description": "Grouping of your reports based on time. Lifetime will have no grouping. This option is used for reports' streams only.", "default": "DAY", "enum": ["LIFETIME", "DAY", "HOUR"], "order": 2, diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/spec.py b/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/spec.py index f33e829befff06..6ce93d4b24e275 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/spec.py +++ b/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/spec.py @@ -23,7 +23,7 @@ class Config: secret: str = Field(title="Secret", description="The private key of the developer's application.", airbyte_secret=True) - access_token: str = Field(title="Access Token", description="Long-term Authorized Access Token.", airbyte_secret=True) + access_token: str = Field(title="Access Token", description="The long-term authorized access token.", airbyte_secret=True) class SandboxEnvSpec(BaseModel): @@ -34,10 +34,10 @@ class Config: # it is string because UI has the bug https://github.com/airbytehq/airbyte/issues/6875 advertiser_id: str = Field( - title="Advertiser ID", description="The Advertiser ID which generated for the developer's Sandbox application." + title="Advertiser ID", description="The Advertiser ID which generated for the developer's Sandbox application." ) - access_token: str = Field(title="Access Token", description="The Long-term Authorized Access Token.", airbyte_secret=True) + access_token: str = Field(title="Access Token", description="The long-term authorized access token.", airbyte_secret=True) class ProductionEnvSpec(BaseModel): @@ -50,7 +50,7 @@ class Config: app_id: str = Field(description="The App ID applied by the developer.", title="App ID") secret: str = Field(title="Secret", description="The private key of the developer application.", airbyte_secret=True) - access_token: str = Field(title="Access Token", description="The Long-term Authorized Access Token.", airbyte_secret=True) + access_token: str = Field(title="Access Token", description="The long-term authorized access token.", airbyte_secret=True) class SourceTiktokMarketingSpec(BaseModel): @@ -65,15 +65,14 @@ class Config: title="Start Date *", default=DEFAULT_START_DATE, pattern="^[0-9]{4}-[0-9]{2}-[0-9]{2}$", - description="The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. " + description="The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated." "If this parameter is not set, all data will be replicated.", order=1, ) report_granularity: str = Field( title="Report Granularity *", - description="Which time granularity should be grouped by; for LIFETIME there will be no grouping. " - "This option is used for reports' streams only.", + description="Grouping of your reports based on time. Lifetime will have no grouping. This option is used for reports' streams only.", default=ReportGranularity.default().value, enum=[g.value for g in ReportGranularity], order=2, diff --git a/docs/integrations/sources/tiktok-marketing.md b/docs/integrations/sources/tiktok-marketing.md index 9ae8455bfcad27..bc8aaef785493c 100644 --- a/docs/integrations/sources/tiktok-marketing.md +++ b/docs/integrations/sources/tiktok-marketing.md @@ -3,6 +3,8 @@ This page guides you through the process of setting up the TikTok Marketing source connector. 
## Prerequisites +* Start date +* Report Granularity (LIFETIME, DAY, HOUR) For Production environment: * Access token @@ -13,8 +15,6 @@ For Sandbox environment: * Access token * Advertiser ID -* Start date -* Report Granularity (LIFETIME, DAY, HOUR) ## Step 1: Set up TikTok @@ -521,6 +521,7 @@ The connector is restricted by [requests limitation](https://ads.tiktok.com/mark | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------------------------| +| 0.1.8 | 2022-04-28 | [12435](https://github.com/airbytehq/airbyte/pull/12435) | updated spec descriptions | | 0.1.7 | 2022-04-27 | [12380](https://github.com/airbytehq/airbyte/pull/12380) | fixed spec descriptions and documentation | | 0.1.6 | 2022-04-19 | [11378](https://github.com/airbytehq/airbyte/pull/11378) | updated logic for stream initializations, fixed errors in schemas, updated SAT and unit tests | | 0.1.5 | 2022-02-17 | [10398](https://github.com/airbytehq/airbyte/pull/10398) | Add Audience reports | From de7035171de958b62523a901ed801bde8c3552ee Mon Sep 17 00:00:00 2001 From: Jonathan Pearlin Date: Mon, 2 May 2022 14:08:18 -0400 Subject: [PATCH 049/152] Add utility classes for database object creation (#12445) * Add utility classes for database object creation * Remove unused variable --- airbyte-db/lib/build.gradle | 1 + .../main/java/io/airbyte/db/Databases.java | 11 + .../airbyte/db/factory/DSLContextFactory.java | 32 +++ .../airbyte/db/factory/DataSourceFactory.java | 251 ++++++++++++++++++ .../io/airbyte/db/factory/FlywayFactory.java | 53 ++++ .../db/factory/AbstractFactoryTest.java | 34 +++ .../db/factory/DSLContextFactoryTest.java | 31 +++ .../db/factory/DataSourceFactoryTest.java | 103 +++++++ .../airbyte/db/factory/FlywayFactoryTest.java | 39 +++ 9 files changed, 555 insertions(+) create mode 100644 airbyte-db/lib/src/main/java/io/airbyte/db/factory/DSLContextFactory.java create mode 100644 airbyte-db/lib/src/main/java/io/airbyte/db/factory/DataSourceFactory.java create mode 100644 airbyte-db/lib/src/main/java/io/airbyte/db/factory/FlywayFactory.java create mode 100644 airbyte-db/lib/src/test/java/io/airbyte/db/factory/AbstractFactoryTest.java create mode 100644 airbyte-db/lib/src/test/java/io/airbyte/db/factory/DSLContextFactoryTest.java create mode 100644 airbyte-db/lib/src/test/java/io/airbyte/db/factory/DataSourceFactoryTest.java create mode 100644 airbyte-db/lib/src/test/java/io/airbyte/db/factory/FlywayFactoryTest.java diff --git a/airbyte-db/lib/build.gradle b/airbyte-db/lib/build.gradle index 8c5d69903d89c0..3d52778abfcf67 100644 --- a/airbyte-db/lib/build.gradle +++ b/airbyte-db/lib/build.gradle @@ -4,6 +4,7 @@ plugins { dependencies { api 'org.apache.commons:commons-dbcp2:2.7.0' + api 'com.zaxxer:HikariCP:5.0.1' api 'org.jooq:jooq-meta:3.13.4' api 'org.jooq:jooq:3.13.4' api 'org.postgresql:postgresql:42.2.18' diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java b/airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java index 1e193409e042ec..2df0b624f0794a 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java @@ -24,6 +24,17 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +/** + * Provides utility methods to create configured {@link Database} instances. 
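+ * <p>A minimal migration sketch (illustrative only; the connection values are placeholders,
+ * and the calls use the factory methods introduced alongside this deprecation):
+ * <pre>
+ *   final DataSource dataSource = DataSourceFactory.create("docker", "docker",
+ *       "org.postgresql.Driver", "jdbc:postgresql://localhost:5432/airbyte");
+ *   final DSLContext dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES);
+ * </pre>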
+ * + * @deprecated This class has been marked as deprecated as we move to using an application framework + * to manage resources. This class will be removed in a future release. + * + * @see io.airbyte.db.factory.DataSourceFactory + * @see io.airbyte.db.factory.DSLContextFactory + * @see io.airbyte.db.factory.FlywayFactory + */ +@Deprecated(forRemoval = true) public class Databases { private static final Logger LOGGER = LoggerFactory.getLogger(Databases.class); diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DSLContextFactory.java b/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DSLContextFactory.java new file mode 100644 index 00000000000000..eba32e7cb62017 --- /dev/null +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DSLContextFactory.java @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.factory; + +import javax.sql.DataSource; +import org.jooq.DSLContext; +import org.jooq.SQLDialect; +import org.jooq.impl.DSL; + +/** + * Temporary factory class that provides convenience methods for creating a {@link DSLContext} + * instances. This class will be removed once the project has been converted to leverage an + * application framework to manage the creation and injection of {@link DSLContext} objects. + * + * This class replaces direct calls to {@link io.airbyte.db.Databases}. + */ +public class DSLContextFactory { + + /** + * Constructs a configured {@link DSLContext} instance using the provided configuration. + * + * @param dataSource The {@link DataSource} used to connect to the database. + * @param dialect The SQL dialect to use with objects created from this context. + * @return The configured {@link DSLContext}. + */ + public static DSLContext create(final DataSource dataSource, final SQLDialect dialect) { + return DSL.using(dataSource, dialect); + } + +} diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DataSourceFactory.java b/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DataSourceFactory.java new file mode 100644 index 00000000000000..31c659a548cfb3 --- /dev/null +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/factory/DataSourceFactory.java @@ -0,0 +1,251 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.factory; + +import com.zaxxer.hikari.HikariConfig; +import com.zaxxer.hikari.HikariDataSource; +import java.util.Map; +import javax.sql.DataSource; + +/** + * Temporary factory class that provides convenience methods for creating a {@link DataSource} + * instance. This class will be removed once the project has been converted to leverage an + * application framework to manage the creation and injection of {@link DataSource} objects. + * + * This class replaces direct calls to {@link io.airbyte.db.Databases}. + */ +public class DataSourceFactory { + + /** + * Constructs a new {@link DataSource} using the provided configuration. + * + * @param username The username of the database user. + * @param password The password of the database user. + * @param driverClassName The fully qualified name of the JDBC driver class. + * @param jdbcConnectionString The JDBC connection string. + * @return The configured {@link DataSource}. 
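+   * <p>A usage sketch (argument values are illustrative placeholders):
+   * <pre>
+   *   DataSourceFactory.create("docker", "docker", "org.postgresql.Driver",
+   *       "jdbc:postgresql://localhost:5432/airbyte");
+   * </pre>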
+ */ + public static DataSource create(final String username, + final String password, + final String driverClassName, + final String jdbcConnectionString) { + return new DataSourceBuilder() + .withDriverClassName(driverClassName) + .withJdbcUrl(jdbcConnectionString) + .withPassword(password) + .withUsername(username) + .build(); + } + + /** + * Constructs a new {@link DataSource} using the provided configuration. + * + * @param username The username of the database user. + * @param password The password of the database user. + * @param driverClassName The fully qualified name of the JDBC driver class. + * @param jdbcConnectionString The JDBC connection string. + * @param connectionProperties Additional configuration properties for the underlying driver. + * @return The configured {@link DataSource}. + */ + public static DataSource create(final String username, + final String password, + final String driverClassName, + final String jdbcConnectionString, + final Map connectionProperties) { + return new DataSourceBuilder() + .withConnectionProperties(connectionProperties) + .withDriverClassName(driverClassName) + .withJdbcUrl(jdbcConnectionString) + .withPassword(password) + .withUsername(username) + .build(); + } + + /** + * Constructs a new {@link DataSource} using the provided configuration. + * + * @param username The username of the database user. + * @param password The password of the database user. + * @param host The host address of the database. + * @param port The port of the database. + * @param database The name of the database. + * @param driverClassName The fully qualified name of the JDBC driver class. + * @return The configured {@link DataSource}. + */ + public static DataSource create(final String username, + final String password, + final String host, + final int port, + final String database, + final String driverClassName) { + return new DataSourceBuilder() + .withDatabase(database) + .withDriverClassName(driverClassName) + .withHost(host) + .withPort(port) + .withPassword(password) + .withUsername(username) + .build(); + } + + /** + * Constructs a new {@link DataSource} using the provided configuration. + * + * @param username The username of the database user. + * @param password The password of the database user. + * @param host The host address of the database. + * @param port The port of the database. + * @param database The name of the database. + * @param driverClassName The fully qualified name of the JDBC driver class. + * @param connectionProperties Additional configuration properties for the underlying driver. + * @return The configured {@link DataSource}. + */ + public static DataSource create(final String username, + final String password, + final String host, + final int port, + final String database, + final String driverClassName, + final Map connectionProperties) { + return new DataSourceBuilder() + .withConnectionProperties(connectionProperties) + .withDatabase(database) + .withDriverClassName(driverClassName) + .withHost(host) + .withPort(port) + .withPassword(password) + .withUsername(username) + .build(); + } + + /** + * Convenience method that constructs a new {@link DataSource} for a PostgreSQL database using the + * provided configuration. + * + * @param username The username of the database user. + * @param password The password of the database user. + * @param host The host address of the database. + * @param port The port of the database. + * @param database The name of the database. + * @return The configured {@link DataSource}. 
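+   * <p>For example (values are illustrative placeholders):
+   * <pre>
+   *   DataSourceFactory.createPostgres("docker", "docker", "localhost", 5432, "airbyte");
+   * </pre>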
+ */ + public static DataSource createPostgres(final String username, + final String password, + final String host, + final int port, + final String database) { + return new DataSourceBuilder() + .withDatabase(database) + .withDriverClassName("org.postgresql.Driver") + .withHost(host) + .withPort(port) + .withPassword(password) + .withUsername(username) + .build(); + } + + /** + * Builder class used to configure and construct {@link DataSource} instances. + */ + private static class DataSourceBuilder { + + private static final Map JDBC_URL_FORMATS = Map.of("org.postgresql.Driver", "jdbc:postgresql://%s:%d/%s", + "com.amazon.redshift.jdbc.Driver", "jdbc:redshift://%s:%d/%s", + "com.mysql.cj.jdbc.Driver", "jdbc:mysql://%s:%d/%s", + "com.microsoft.sqlserver.jdbc.SQLServerDriver", "jdbc:sqlserver://%s:%d/%s", + "oracle.jdbc.OracleDriver", "jdbc:oracle:thin:@%s:%d:%s", + "ru.yandex.clickhouse.ClickHouseDriver", "jdbc:ch://%s:%d/%s", + "org.mariadb.jdbc.Driver", "jdbc:mariadb://%s:%d/%s"); + + private Map connectionProperties = Map.of(); + private String database; + private String driverClassName = "org.postgresql.Driver"; + private String host; + private String jdbcUrl; + private Integer maximumPoolSize = 5; + private Integer minimumPoolSize = 0; + private String password; + private Integer port = 5432; + private String username; + + private DataSourceBuilder() {} + + public DataSourceBuilder withConnectionProperties(final Map connectionProperties) { + if (connectionProperties != null) { + this.connectionProperties = connectionProperties; + } + return this; + } + + public DataSourceBuilder withDatabase(final String database) { + this.database = database; + return this; + } + + public DataSourceBuilder withDriverClassName(final String driverClassName) { + this.driverClassName = driverClassName; + return this; + } + + public DataSourceBuilder withHost(final String host) { + this.host = host; + return this; + } + + public DataSourceBuilder withJdbcUrl(final String jdbcUrl) { + this.jdbcUrl = jdbcUrl; + return this; + } + + public DataSourceBuilder withMaximumPoolSize(final Integer maximumPoolSize) { + if (maximumPoolSize != null) { + this.maximumPoolSize = maximumPoolSize; + } + return this; + } + + public DataSourceBuilder withMinimumPoolSize(final Integer minimumPoolSize) { + if (minimumPoolSize != null) { + this.minimumPoolSize = minimumPoolSize; + } + return this; + } + + public DataSourceBuilder withPassword(final String password) { + this.password = password; + return this; + } + + public DataSourceBuilder withPort(final Integer port) { + if (port != null) { + this.port = port; + } + return this; + } + + public DataSourceBuilder withUsername(final String username) { + this.username = username; + return this; + } + + public DataSource build() { + final HikariConfig config = new HikariConfig(); + config.setDriverClassName(driverClassName); + config.setJdbcUrl(jdbcUrl != null ? 
jdbcUrl : String.format(JDBC_URL_FORMATS.getOrDefault(driverClassName, ""), host, port, database)); + config.setMaximumPoolSize(maximumPoolSize); + config.setMinimumIdle(minimumPoolSize); + config.setPassword(password); + config.setUsername(username); + + connectionProperties.forEach(config::addDataSourceProperty); + + final HikariDataSource dataSource = new HikariDataSource(config); + dataSource.validate(); + return dataSource; + } + + } + +} diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/factory/FlywayFactory.java b/airbyte-db/lib/src/main/java/io/airbyte/db/factory/FlywayFactory.java new file mode 100644 index 00000000000000..0e5526745fd94d --- /dev/null +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/factory/FlywayFactory.java @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.factory; + +import javax.sql.DataSource; +import org.flywaydb.core.Flyway; + +/** + * Temporary factory class that provides convenience methods for creating a {@link Flyway} + * instances. This class will be removed once the project has been converted to leverage an + * application framework to manage the creation and injection of {@link Flyway} objects. + * + * This class replaces direct calls to {@link io.airbyte.db.Databases}. + */ +public class FlywayFactory { + + static final String MIGRATION_TABLE_FORMAT = "airbyte_%s_migrations"; + + // Constants for Flyway baseline. See here for details: + // https://flywaydb.org/documentation/command/baseline + static final String BASELINE_VERSION = "0.29.0.001"; + static final String BASELINE_DESCRIPTION = "Baseline from file-based migration v1"; + static final boolean BASELINE_ON_MIGRATION = true; + + /** + * Constructs a configured {@link Flyway} instance using the provided configuration. + * + * @param dataSource The {@link DataSource} used to connect to the database. + * @param installedBy The name of the module performing the migration. + * @param dbIdentifier The name of the database to be migrated. This is used to name the table to + * hold the migration history for the database. + * @param migrationFileLocations The array of migration files to be used. + * @return The configured {@link Flyway} instance. + */ + public static Flyway create(final DataSource dataSource, + final String installedBy, + final String dbIdentifier, + final String... migrationFileLocations) { + return Flyway.configure() + .dataSource(dataSource) + .baselineVersion(BASELINE_VERSION) + .baselineDescription(BASELINE_DESCRIPTION) + .baselineOnMigrate(BASELINE_ON_MIGRATION) + .installedBy(installedBy) + .table(String.format(MIGRATION_TABLE_FORMAT, dbIdentifier)) + .locations(migrationFileLocations) + .load(); + + } + +} diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/factory/AbstractFactoryTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/factory/AbstractFactoryTest.java new file mode 100644 index 00000000000000..25f8b4c4ca3ed8 --- /dev/null +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/factory/AbstractFactoryTest.java @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.factory; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.testcontainers.containers.PostgreSQLContainer; + +/** + * Common test suite for the classes found in the {@code io.airbyte.db.factory} package. 
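+ * <p>Each run provisions a disposable PostgreSQL Testcontainer ({@code postgres:13-alpine})
+ * in {@code dbSetup()}; the concrete factory tests connect to it through the shared
+ * {@code container} field.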
+ */ +public abstract class AbstractFactoryTest { + + private static final String DATABASE_NAME = "airbyte_test_database"; + + protected static PostgreSQLContainer container; + + @BeforeAll + public static void dbSetup() { + container = new PostgreSQLContainer<>("postgres:13-alpine") + .withDatabaseName(DATABASE_NAME) + .withUsername("docker") + .withPassword("docker"); + container.start(); + } + + @AfterAll + public static void dbDown() { + container.close(); + } + +} diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/factory/DSLContextFactoryTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/factory/DSLContextFactoryTest.java new file mode 100644 index 00000000000000..b4bae85c24f921 --- /dev/null +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/factory/DSLContextFactoryTest.java @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.factory; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import javax.sql.DataSource; +import org.jooq.DSLContext; +import org.jooq.SQLDialect; +import org.junit.jupiter.api.Test; +import org.postgresql.Driver; + +/** + * Test suite for the {@link DSLContextFactory} class. + */ +public class DSLContextFactoryTest extends AbstractFactoryTest { + + @Test + void testCreatingADslContext() { + final DataSource dataSource = + DataSourceFactory.create(container.getUsername(), container.getPassword(), Driver.class.getName(), container.getJdbcUrl()); + final SQLDialect dialect = SQLDialect.POSTGRES; + final DSLContext dslContext = DSLContextFactory.create(dataSource, dialect); + assertNotNull(dslContext); + assertEquals(dialect, dslContext.configuration().dialect()); + } + +} diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/factory/DataSourceFactoryTest.java b/airbyte-db/lib/src/test/java/io/airbyte/db/factory/DataSourceFactoryTest.java new file mode 100644 index 00000000000000..4cfe7cc1412453 --- /dev/null +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/factory/DataSourceFactoryTest.java @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.factory; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import com.zaxxer.hikari.HikariDataSource; +import java.util.Map; +import javax.sql.DataSource; +import org.junit.jupiter.api.Test; +import org.postgresql.Driver; + +/** + * Test suite for the {@link DataSourceFactory} class. 
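+ * Covers every {@code create} variant against the shared container, including a negative
+ * case that expects a {@link RuntimeException} for an unknown driver class name.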
+ */ +public class DataSourceFactoryTest extends AbstractFactoryTest { + + @Test + void testCreatingADataSourceWithJdbcUrl() { + final String username = container.getUsername(); + final String password = container.getPassword(); + final String driverClassName = Driver.class.getName(); + final String jdbcUrl = container.getJdbcUrl(); + + final DataSource dataSource = DataSourceFactory.create(username, password, driverClassName, jdbcUrl); + assertNotNull(dataSource); + assertEquals(HikariDataSource.class, dataSource.getClass()); + } + + @Test + void testCreatingADataSourceWithJdbcUrlAndConnectionProperties() { + final String username = container.getUsername(); + final String password = container.getPassword(); + final String driverClassName = Driver.class.getName(); + final String jdbcUrl = container.getJdbcUrl(); + final Map connectionProperties = Map.of("foo", "bar"); + + final DataSource dataSource = DataSourceFactory.create(username, password, driverClassName, jdbcUrl, connectionProperties); + assertNotNull(dataSource); + assertEquals(HikariDataSource.class, dataSource.getClass()); + } + + @Test + void testCreatingADataSourceWithHostAndPort() { + final String username = container.getUsername(); + final String password = container.getPassword(); + final String driverClassName = Driver.class.getName(); + final String host = container.getHost(); + final Integer port = container.getFirstMappedPort(); + final String database = container.getDatabaseName(); + + final DataSource dataSource = DataSourceFactory.create(username, password, host, port, database, driverClassName); + assertNotNull(dataSource); + assertEquals(HikariDataSource.class, dataSource.getClass()); + } + + @Test + void testCreatingADataSourceWithHostPortAndConnectionProperties() { + final String username = container.getUsername(); + final String password = container.getPassword(); + final String driverClassName = Driver.class.getName(); + final String host = container.getHost(); + final Integer port = container.getFirstMappedPort(); + final String database = container.getDatabaseName(); + final Map connectionProperties = Map.of("foo", "bar"); + + final DataSource dataSource = DataSourceFactory.create(username, password, host, port, database, driverClassName, connectionProperties); + assertNotNull(dataSource); + assertEquals(HikariDataSource.class, dataSource.getClass()); + } + + @Test + void testCreatingAnInvalidDataSourceWithHostAndPort() { + final String username = container.getUsername(); + final String password = container.getPassword(); + final String driverClassName = "Unknown"; + final String host = container.getHost(); + final Integer port = container.getFirstMappedPort(); + final String database = container.getDatabaseName(); + + assertThrows(RuntimeException.class, () -> { + DataSourceFactory.create(username, password, host, port, database, driverClassName); + }); + } + + @Test + void testCreatingAPostgresqlDataSource() { + final String username = container.getUsername(); + final String password = container.getPassword(); + final String host = container.getHost(); + final Integer port = container.getFirstMappedPort(); + final String database = container.getDatabaseName(); + + final DataSource dataSource = DataSourceFactory.createPostgres(username, password, host, port, database); + assertNotNull(dataSource); + assertEquals(HikariDataSource.class, dataSource.getClass()); + } + +} diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/factory/FlywayFactoryTest.java 
b/airbyte-db/lib/src/test/java/io/airbyte/db/factory/FlywayFactoryTest.java new file mode 100644 index 00000000000000..2c2913261b2839 --- /dev/null +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/factory/FlywayFactoryTest.java @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.factory; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import javax.sql.DataSource; +import org.flywaydb.core.Flyway; +import org.junit.jupiter.api.Test; +import org.postgresql.Driver; + +/** + * Test suite for the {@link FlywayFactory} class. + */ +public class FlywayFactoryTest extends AbstractFactoryTest { + + @Test + void testCreatingAFlywayInstance() { + final String installedBy = "test"; + final String dbIdentifier = "test"; + final String migrationFileLocation = "classpath:io/airbyte/db/instance/toys/migrations"; + final DataSource dataSource = + DataSourceFactory.create(container.getUsername(), container.getPassword(), Driver.class.getName(), container.getJdbcUrl()); + + final Flyway flyway = FlywayFactory.create(dataSource, installedBy, dbIdentifier, migrationFileLocation); + assertNotNull(flyway); + assertTrue(flyway.getConfiguration().isBaselineOnMigrate()); + assertEquals(FlywayFactory.BASELINE_DESCRIPTION, flyway.getConfiguration().getBaselineDescription()); + assertEquals(FlywayFactory.BASELINE_VERSION, flyway.getConfiguration().getBaselineVersion().getVersion()); + assertEquals(installedBy, flyway.getConfiguration().getInstalledBy()); + assertEquals(String.format(FlywayFactory.MIGRATION_TABLE_FORMAT, dbIdentifier), flyway.getConfiguration().getTable()); + assertEquals(migrationFileLocation, flyway.getConfiguration().getLocations()[0].getDescriptor()); + } + +} From 4ab93e5ca94239aaf6b4466cef8b846b38d341bc Mon Sep 17 00:00:00 2001 From: terencecho Date: Mon, 2 May 2022 12:13:09 -0700 Subject: [PATCH 050/152] Ignore auto-disable activity if connection already inactive (#12507) * Ignore auto disable activity if already inactive * fix formatting * fix unit test --- .../AutoDisableConnectionActivityImpl.java | 15 +++++++++------ .../AutoDisableConnectionActivityTest.java | 3 ++- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java index 80e387c1b0fafb..af71dd0b75f586 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java @@ -13,7 +13,6 @@ import io.airbyte.config.Configs; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSync.Status; -import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.scheduler.models.Job; import io.airbyte.scheduler.models.JobStatus; @@ -25,7 +24,6 @@ import java.io.IOException; import java.util.List; import java.util.Optional; -import java.util.UUID; import java.util.concurrent.TimeUnit; import lombok.AllArgsConstructor; @@ -50,6 +48,12 @@ public class AutoDisableConnectionActivityImpl implements AutoDisableConnectionA public 
AutoDisableConnectionOutput autoDisableFailingConnection(final AutoDisableConnectionActivityInput input) { if (featureFlags.autoDisablesFailingConnections()) { try { + // if connection is already inactive, no need to disable + final StandardSync standardSync = configRepository.getStandardSync(input.getConnectionId()); + if (standardSync.getStatus() == Status.INACTIVE) { + return new AutoDisableConnectionOutput(false); + } + final int maxDaysOfOnlyFailedJobs = configs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(); final int maxDaysOfOnlyFailedJobsBeforeWarning = maxDaysOfOnlyFailedJobs / 2; final int maxFailedJobsInARowBeforeConnectionDisableWarning = configs.getMaxFailedJobsInARowBeforeConnectionDisable() / 2; @@ -82,7 +86,7 @@ public AutoDisableConnectionOutput autoDisableFailingConnection(final AutoDisabl return new AutoDisableConnectionOutput(false); } else if (numFailures >= configs.getMaxFailedJobsInARowBeforeConnectionDisable()) { // disable connection if max consecutive failed jobs limit has been hit - disableConnection(input.getConnectionId(), lastJob); + disableConnection(standardSync, lastJob); return new AutoDisableConnectionOutput(true); } else if (numFailures == maxFailedJobsInARowBeforeConnectionDisableWarning && !warningPreviouslySentForMaxDays) { // warn if number of consecutive failures hits 50% of MaxFailedJobsInARow @@ -102,7 +106,7 @@ public AutoDisableConnectionOutput autoDisableFailingConnection(final AutoDisabl // disable connection if only failed jobs in the past maxDaysOfOnlyFailedJobs days if (firstReplicationOlderThanMaxDisableDays && noPreviousSuccess) { - disableConnection(input.getConnectionId(), lastJob); + disableConnection(standardSync, lastJob); return new AutoDisableConnectionOutput(true); } @@ -172,8 +176,7 @@ private int getDaysSinceTimestamp(final long currentTimestampInSeconds, final lo return Math.toIntExact(TimeUnit.SECONDS.toDays(currentTimestampInSeconds - timestampInSeconds)); } - private void disableConnection(final UUID connectionId, final Job lastJob) throws JsonValidationException, IOException, ConfigNotFoundException { - final StandardSync standardSync = configRepository.getStandardSync(connectionId); + private void disableConnection(final StandardSync standardSync, final Job lastJob) throws JsonValidationException, IOException { standardSync.setStatus(Status.INACTIVE); configRepository.writeStandardSync(standardSync); diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java index 36bc505899aec5..6423f2d8b484de 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java @@ -82,7 +82,8 @@ class AutoDisableConnectionActivityTest { private final StandardSync standardSync = new StandardSync(); @BeforeEach - void setUp() throws IOException { + void setUp() throws IOException, JsonValidationException, ConfigNotFoundException { + Mockito.when(mConfigRepository.getStandardSync(CONNECTION_ID)).thenReturn(standardSync); standardSync.setStatus(Status.ACTIVE); Mockito.when(mFeatureFlags.autoDisablesFailingConnections()).thenReturn(true); Mockito.when(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable()).thenReturn(MAX_DAYS_OF_ONLY_FAILED_JOBS); From 
c609330990c205357c4f7f0ad277cffdc710b442 Mon Sep 17 00:00:00 2001 From: Pedro Lopez Date: Mon, 2 May 2022 16:06:01 -0400 Subject: [PATCH 051/152] add change-case dependency to package-lock fixes broken master build --- .../connector-templates/generator/package-lock.json | 1 + 1 file changed, 1 insertion(+) diff --git a/airbyte-integrations/connector-templates/generator/package-lock.json b/airbyte-integrations/connector-templates/generator/package-lock.json index 0fb11c7cd76cb5..9a26b438e72cda 100644 --- a/airbyte-integrations/connector-templates/generator/package-lock.json +++ b/airbyte-integrations/connector-templates/generator/package-lock.json @@ -9,6 +9,7 @@ "version": "0.1.0", "devDependencies": { "capital-case": "^1.0.4", + "change-case": "^4.1.2", "handlebars": "^4.7.7", "plop": "^3.0.5", "set-value": ">=4.0.1", From 763dea255d08c41c5c417300db0129bc95b041ee Mon Sep 17 00:00:00 2001 From: Brian Leonard Date: Mon, 2 May 2022 13:44:13 -0700 Subject: [PATCH 052/152] Uses private action for workflow (#12525) --- .github/workflows/shared-issues.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/shared-issues.yml b/.github/workflows/shared-issues.yml index e7c167af81fc09..03bbb8eb0554f6 100644 --- a/.github/workflows/shared-issues.yml +++ b/.github/workflows/shared-issues.yml @@ -7,7 +7,10 @@ jobs: shared-issues: runs-on: ubuntu-latest steps: - - uses: airbytehq/workflow-actions@production + - uses: nick-fields/private-action-loader@v3 with: + pal-repo-token: "${{ secrets.OCTAVIA_PAT }}" + pal-repo-name: airbytehq/workflow-actions@production + # the following input gets passed to the private action token: "${{ secrets.OCTAVIA_PAT }}" command: "issue" From 35023be5b04d803e5d10262db6d2b52be97ecfd8 Mon Sep 17 00:00:00 2001 From: Edmundo Ruiz Ghanem <168664+edmundito@users.noreply.github.com> Date: Mon, 2 May 2022 16:57:12 -0400 Subject: [PATCH 053/152] Fix stopColor prop in CircleLoader svg (#12513) --- airbyte-webapp/src/components/StatusIcon/CircleLoader.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte-webapp/src/components/StatusIcon/CircleLoader.tsx b/airbyte-webapp/src/components/StatusIcon/CircleLoader.tsx index 6cf80c9e2c9399..49de2a314da7a4 100644 --- a/airbyte-webapp/src/components/StatusIcon/CircleLoader.tsx +++ b/airbyte-webapp/src/components/StatusIcon/CircleLoader.tsx @@ -38,8 +38,8 @@ const CircleLoader = ({ title }: Props): JSX.Element => ( gradientUnits="userSpaceOnUse" gradientTransform="translate(0 0)" > - - + + {title && {title}} From 98e5953d8d2b87ac7fb35eb13155a218ed810f49 Mon Sep 17 00:00:00 2001 From: jordan-glitch <65691557+jordan-glitch@users.noreply.github.com> Date: Tue, 3 May 2022 07:44:21 +1000 Subject: [PATCH 054/152] :bug: Source Gitlab: allow `container_expiration_policy` to be nullable + fix null projects list (#11907) * allow nullable container_expiration_policy * Update Dockerfile * Update source_definitions.yaml * Update source_specs.yaml * fix unspecified projects * add doc update Co-authored-by: marcosmarxm --- .../init/src/main/resources/seed/source_definitions.yaml | 2 +- airbyte-config/init/src/main/resources/seed/source_specs.yaml | 2 +- airbyte-integrations/connectors/source-gitlab/Dockerfile | 2 +- .../source-gitlab/source_gitlab/schemas/projects.json | 2 +- .../connectors/source-gitlab/source_gitlab/source.py | 2 +- docs/integrations/sources/gitlab.md | 1 + 6 files changed, 6 insertions(+), 5 deletions(-) diff --git 
a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index fd9121924be3f1..e94e30369edd22 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -265,7 +265,7 @@ - name: Gitlab sourceDefinitionId: 5e6175e5-68e1-4c17-bff9-56103bbb0d80 dockerRepository: airbyte/source-gitlab - dockerImageTag: 0.1.4 + dockerImageTag: 0.1.5 documentationUrl: https://docs.airbyte.io/integrations/sources/gitlab icon: gitlab.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 65386e30285c6c..a49160a0fd33af 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -2595,7 +2595,7 @@ path_in_connector_config: - "credentials" - "client_secret" -- dockerImage: "airbyte/source-gitlab:0.1.4" +- dockerImage: "airbyte/source-gitlab:0.1.5" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/gitlab" connectionSpecification: diff --git a/airbyte-integrations/connectors/source-gitlab/Dockerfile b/airbyte-integrations/connectors/source-gitlab/Dockerfile index 9cd9977769ff10..f831f3760d4c64 100644 --- a/airbyte-integrations/connectors/source-gitlab/Dockerfile +++ b/airbyte-integrations/connectors/source-gitlab/Dockerfile @@ -13,5 +13,5 @@ RUN pip install . ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.4 +LABEL io.airbyte.version=0.1.5 LABEL io.airbyte.name=airbyte/source-gitlab diff --git a/airbyte-integrations/connectors/source-gitlab/source_gitlab/schemas/projects.json b/airbyte-integrations/connectors/source-gitlab/source_gitlab/schemas/projects.json index b440ceb148bd83..e7b21178e791ca 100644 --- a/airbyte-integrations/connectors/source-gitlab/source_gitlab/schemas/projects.json +++ b/airbyte-integrations/connectors/source-gitlab/source_gitlab/schemas/projects.json @@ -138,7 +138,7 @@ "type": ["null", "boolean"] }, "container_expiration_policy": { - "type": "object", + "type": ["null", "object"], "properties": { "cadence": { "type": ["null", "string"] diff --git a/airbyte-integrations/connectors/source-gitlab/source_gitlab/source.py b/airbyte-integrations/connectors/source-gitlab/source_gitlab/source.py index d7dea49b254bad..d05a7188838393 100644 --- a/airbyte-integrations/connectors/source-gitlab/source_gitlab/source.py +++ b/airbyte-integrations/connectors/source-gitlab/source_gitlab/source.py @@ -43,7 +43,7 @@ def _generate_main_streams(self, config: Mapping[str, Any]) -> Tuple[GitlabStrea auth = TokenAuthenticator(token=config["private_token"]) auth_params = dict(authenticator=auth, api_url=config["api_url"]) - pids = list(filter(None, config.get("projects").split(" "))) + pids = list(filter(None, config.get("projects", "").split(" "))) gids = config.get("groups") if gids: diff --git a/docs/integrations/sources/gitlab.md b/docs/integrations/sources/gitlab.md index 8b6259b235a38a..ce17f977fe8127 100644 --- a/docs/integrations/sources/gitlab.md +++ b/docs/integrations/sources/gitlab.md @@ -63,6 +63,7 @@ GitLab source is working with GitLab API v4. 
It can also work with self-hosted GitLab.
| Version | Date | Pull Request | Subject |
|:--------|:-----------|:---------------------------------------------------------| :--- |
+| 0.1.5 | 2022-05-02 | [11907](https://github.com/airbytehq/airbyte/pull/11907) | Fix null projects param and `container_expiration_policy` |
| 0.1.4 | 2022-03-23 | [11140](https://github.com/airbytehq/airbyte/pull/11140) | Ingest All Accessible Groups if not Specified in Config |
| 0.1.3 | 2021-12-21 | [8991](https://github.com/airbytehq/airbyte/pull/8991) | Update connector fields title/description |
| 0.1.2 | 2021-10-18 | [7108](https://github.com/airbytehq/airbyte/pull/7108) | Allow all domains to be used as `api_url` |

From aab4f34ca402febcd03cdf0cc81ba9886424edc2 Mon Sep 17 00:00:00 2001
From: Ivica Taseski
Date: Tue, 3 May 2022 00:58:06 +0200
Subject: [PATCH 055/152] 🎉 Source Mixpanel: Enhance streams to incremental (#11501)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* enhance streams to incremental
* revert file structure
* 11501
* add engage
* doc and bump connector version

Co-authored-by: marcosmarxm
---
 .../resources/seed/source_definitions.yaml    |   2 +-
 .../src/main/resources/seed/source_specs.yaml |   2 +-
 .../connectors/source-mixpanel/Dockerfile     |   2 +-
 .../source-mixpanel/source_mixpanel/source.py |  77 +++++++++++--
 .../unit_tests/test_streams.py                | 109 +++++++++++++++++-
 docs/integrations/sources/mixpanel.md         |   1 +
 6 files changed, 177 insertions(+), 16 deletions(-)

diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
index e94e30369edd22..296d70636390b0 100644
--- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
@@ -473,7 +473,7 @@
 - name: Mixpanel
   sourceDefinitionId: 12928b32-bf0a-4f1e-964f-07e12e37153a
   dockerRepository: airbyte/source-mixpanel
-  dockerImageTag: 0.1.13
+  dockerImageTag: 0.1.14
   documentationUrl: https://docs.airbyte.io/integrations/sources/mixpanel
   icon: mixpanel.svg
   sourceType: api
diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml
index a49160a0fd33af..9f925c519434d0 100644
--- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml
+++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml
@@ -5038,7 +5038,7 @@
     path_in_connector_config:
     - "credentials"
     - "client_secret"
-- dockerImage: "airbyte/source-mixpanel:0.1.13"
+- dockerImage: "airbyte/source-mixpanel:0.1.14"
  spec:
    documentationUrl: "https://docs.airbyte.io/integrations/sources/mixpanel"
    connectionSpecification:
diff --git a/airbyte-integrations/connectors/source-mixpanel/Dockerfile b/airbyte-integrations/connectors/source-mixpanel/Dockerfile
index 1e2c0c416bc6db..da1a91a9b1730b 100644
--- a/airbyte-integrations/connectors/source-mixpanel/Dockerfile
+++ b/airbyte-integrations/connectors/source-mixpanel/Dockerfile
@@ -13,5 +13,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
 ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]
-LABEL io.airbyte.version=0.1.13
+LABEL io.airbyte.version=0.1.14
 LABEL io.airbyte.name=airbyte/source-mixpanel
diff --git a/airbyte-integrations/connectors/source-mixpanel/source_mixpanel/source.py b/airbyte-integrations/connectors/source-mixpanel/source_mixpanel/source.py
index
f394f09ac1dadd..d3398f8c4718c6 100644 --- a/airbyte-integrations/connectors/source-mixpanel/source_mixpanel/source.py +++ b/airbyte-integrations/connectors/source-mixpanel/source_mixpanel/source.py @@ -2,7 +2,6 @@ # Copyright (c) 2021 Airbyte, Inc., all rights reserved. # - import base64 import json import time @@ -144,9 +143,26 @@ class Cohorts(MixpanelStream): data_field: str = None primary_key: str = "id" + cursor_field = "created" + def path(self, **kwargs) -> str: return "cohorts/list" + def parse_response(self, response: requests.Response, stream_state: Mapping[str, Any], **kwargs) -> Iterable[Mapping]: + records = super().parse_response(response, stream_state=stream_state, **kwargs) + for record in records: + record_cursor = record.get(self.cursor_field, "") + state_cursor = stream_state.get(self.cursor_field, "") + if not stream_state or record_cursor >= state_cursor: + yield record + + def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]): + state_cursor = (current_stream_state or {}).get(self.cursor_field, "") + + record_cursor = latest_record.get(self.cursor_field, self.start_date) + + return {self.cursor_field: max(state_cursor, record_cursor)} + class FunnelsList(MixpanelStream): """List all funnels @@ -410,6 +426,14 @@ class Engage(MixpanelStream): page_size: int = 1000 # min 100 _total: Any = None + @property + def source_defined_cursor(self) -> bool: + return False + + @property + def supports_incremental(self) -> bool: + return True + # enable automatic object mutation to align with desired schema before outputting to the destination transformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) @@ -448,7 +472,7 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, self._total = None return None - def process_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + def process_response(self, response: requests.Response, stream_state: Mapping[str, Any], **kwargs) -> Iterable[Mapping]: """ { "page": 0 @@ -472,6 +496,7 @@ def process_response(self, response: requests.Response, **kwargs) -> Iterable[Ma "$name":"Nadine Burzler" "id":"632540fa-d1af-4535-bc52-e331955d363e" "$last_seen":"2020-06-28T12:12:31" + ... } },{ ... 
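(Editor's aside between hunks: these Cohorts/Engage changes implement incremental reads by filtering client-side, presumably because these Mixpanel endpoints offer no server-side date filter. A minimal sketch of the pattern, reusing the ids and timestamps from the Cohorts fixture in the unit tests further down; this is illustrative, not the connector's exact code:

    # Records and state mirroring the Cohorts test fixture below.
    records = [
        {"id": 1000, "created": "2019-03-19 23:49:51"},
        {"id": 2000, "created": "2019-04-02 23:22:01"},
    ]
    state = {"created": "2019-04-02 23:22:01"}

    # "YYYY-MM-DD hh:mm:ss" strings sort chronologically, so plain string
    # comparison and max() are safe. Using >= (not >) re-emits the record
    # sitting exactly on the cursor: a duplicate is preferred to data loss.
    new_records = [r for r in records if r.get("created", "") >= state["created"]]
    new_state = {"created": max([state["created"], *(r["created"] for r in new_records)])}

    assert [r["id"] for r in new_records] == [2000]
    assert new_state == {"created": "2019-04-02 23:22:01"}

End of aside.)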
@@ -481,6 +506,7 @@ def process_response(self, response: requests.Response, **kwargs) -> Iterable[Ma } """ records = response.json().get(self.data_field, {}) + cursor_field = stream_state.get(self.usr_cursor_key()) for record in records: item = {"distinct_id": record["$distinct_id"]} properties = record["$properties"] @@ -492,7 +518,10 @@ def process_response(self, response: requests.Response, **kwargs) -> Iterable[Ma # to stream: 'browser' this_property_name = this_property_name[1:] item[this_property_name] = properties[property_name] - yield item + item_cursor = item.get(cursor_field, "") + state_cursor = stream_state.get(cursor_field, "") + if not stream_state or item_cursor >= state_cursor: + yield item def get_json_schema(self) -> Mapping[str, Any]: """ @@ -533,6 +562,32 @@ def get_json_schema(self) -> Mapping[str, Any]: return schema + def usr_cursor_key(self): + return "usr_cursor_key" + + def read_records( + self, + sync_mode: SyncMode, + cursor_field: List[str] = None, + stream_slice: Mapping[str, Any] = None, + stream_state: Mapping[str, Any] = None, + ) -> Iterable[Mapping[str, Any]]: + if sync_mode == SyncMode.incremental: + cursor_name = cursor_field[-1] + if stream_state: + stream_state[self.usr_cursor_key()] = cursor_name + else: + stream_state = {self.usr_cursor_key(): cursor_name} + return super().read_records(sync_mode, cursor_field, stream_slice, stream_state=stream_state) + + def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]): + cursor_field = current_stream_state.get(self.usr_cursor_key()) + state_cursor = (current_stream_state or {}).get(cursor_field, "") + + record_cursor = latest_record.get(cursor_field, self.start_date) + + return {cursor_field: max(state_cursor, record_cursor)} + class CohortMembers(Engage): """Return list of users grouped by cohort""" @@ -550,7 +605,9 @@ def stream_slices( self, sync_mode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None ) -> Iterable[Optional[Mapping[str, Any]]]: stream_slices = [] - cohorts = Cohorts(**self.get_stream_params()).read_records(sync_mode=sync_mode) + # full refresh is needed because even though some cohorts might already have been read + # they can still have new members added + cohorts = Cohorts(**self.get_stream_params()).read_records(SyncMode.full_refresh) for cohort in cohorts: stream_slices.append({"id": cohort["id"]}) @@ -788,12 +845,6 @@ def get_json_schema(self) -> Mapping[str, Any]: return schema -class TokenAuthenticatorBase64(TokenAuthenticator): - def __init__(self, token: str, auth_method: str = "Basic", **kwargs): - token = base64.b64encode(token.encode("utf8")).decode("utf8") - super().__init__(token=token, auth_method=auth_method, **kwargs) - - class SourceMixpanel(AbstractSource): def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, any]: """ @@ -856,3 +907,9 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: Funnels(authenticator=auth, **config), Revenue(authenticator=auth, **config), ] + + +class TokenAuthenticatorBase64(TokenAuthenticator): + def __init__(self, token: str, auth_method: str = "Basic", **kwargs): + token = base64.b64encode(token.encode("utf8")).decode("utf8") + super().__init__(token=token, auth_method=auth_method, **kwargs) diff --git a/airbyte-integrations/connectors/source-mixpanel/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-mixpanel/unit_tests/test_streams.py index aa2623f83ea9ae..279042369777e3 100644 --- 
a/airbyte-integrations/connectors/source-mixpanel/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-mixpanel/unit_tests/test_streams.py @@ -9,6 +9,9 @@ from airbyte_cdk.models import SyncMode from source_mixpanel.source import ( Annotations, + CohortMembers, + Cohorts, + Engage, EngageSchema, Export, ExportSchema, @@ -23,6 +26,8 @@ logger = AirbyteLogger() +MIXPANEL_BASE_URL = "https://mixpanel.com/api/2.0/" + @pytest.fixture def patch_base_class(mocker): @@ -69,9 +74,107 @@ def test_updated_state(patch_incremental_base_class): assert updated_state == {"date": "2021-02-25T00:00:00Z"} -def test_cohorts_stream(): - # tested in itaseskii:mixpanel-incremental-syncs - return None +@pytest.fixture +def cohorts_response(): + return setup_response( + 200, + [ + { + "count": 150, + "is_visible": 1, + "description": "This cohort is visible, has an id = 1000, and currently has 150 users.", + "created": "2019-03-19 23:49:51", + "project_id": 1, + "id": 1000, + "name": "Cohort One", + }, + { + "count": 25, + "is_visible": 0, + "description": "This cohort isn't visible, has an id = 2000, and currently has 25 users.", + "created": "2019-04-02 23:22:01", + "project_id": 1, + "id": 2000, + "name": "Cohort Two", + }, + ], + ) + + +def test_cohorts_stream_incremental(requests_mock, cohorts_response): + requests_mock.register_uri("GET", MIXPANEL_BASE_URL + "cohorts/list", cohorts_response) + + stream = Cohorts(authenticator=MagicMock()) + + records = stream.read_records(sync_mode=SyncMode.incremental, stream_state={"created": "2019-04-02 23:22:01"}) + + records_length = sum(1 for _ in records) + assert records_length == 1 + + +@pytest.fixture +def engage_response(): + return setup_response( + 200, + { + "page": 0, + "page_size": 1000, + "session_id": "1234567890-EXAMPL", + "status": "ok", + "total": 2, + "results": [ + { + "$distinct_id": "9d35cd7f-3f06-4549-91bf-198ee58bb58a", + "$properties": { + "$created": "2008-12-12T11:20:47", + "$browser": "Chrome", + "$browser_version": "83.0.4103.116", + "$email": "clark@asw.com", + "$first_name": "Clark", + "$last_name": "Kent", + "$name": "Clark Kent", + }, + }, + { + "$distinct_id": "cd9d357f-3f06-4549-91bf-158bb598ee8a", + "$properties": { + "$created": "2008-11-12T11:20:47", + "$browser": "Firefox", + "$browser_version": "83.0.4103.116", + "$email": "bruce@asw.com", + "$first_name": "Bruce", + "$last_name": "Wayne", + "$name": "Bruce Wayne", + }, + }, + ], + }, + ) + + +def test_engage_stream_incremental(requests_mock, engage_response): + requests_mock.register_uri("POST", MIXPANEL_BASE_URL + "engage?page_size=1000", engage_response) + + stream = Engage(authenticator=MagicMock()) + + records = stream.read_records(sync_mode=SyncMode.incremental, cursor_field=["created"], stream_state={"created": "2008-12-12T11:20:47"}) + + records_length = sum(1 for _ in records) + assert records_length == 1 + + +def test_cohort_members_stream_incremental(requests_mock, engage_response, cohorts_response): + requests_mock.register_uri("POST", MIXPANEL_BASE_URL + "engage?page_size=1000", engage_response) + requests_mock.register_uri("GET", MIXPANEL_BASE_URL + "cohorts/list", cohorts_response) + + stream = CohortMembers(authenticator=MagicMock()) + + records = stream.read_records( + sync_mode=SyncMode.incremental, cursor_field=["created"], stream_state={"created": "2008-12-12T11:20:47"}, stream_slice={"id": 1000} + ) + + records_length = sum(1 for _ in records) + assert records_length == 1 @pytest.fixture diff --git a/docs/integrations/sources/mixpanel.md 
b/docs/integrations/sources/mixpanel.md index debb8bb54f0295..4bbb274e0e898b 100644 --- a/docs/integrations/sources/mixpanel.md +++ b/docs/integrations/sources/mixpanel.md @@ -59,6 +59,7 @@ Select the correct region \(EU or US\) for your Mixpanel project. See detail [he | Version | Date | Pull Request | Subject | |:---------|:-----------|:---------------------------------------------------------|:-----------------------------------------------------------------------------------------------------| +| `0.1.14` | 2022-05-02 | [11501](https://github.com/airbytehq/airbyte/pull/11501) | Improve incremental sync method to streams | | `0.1.13` | 2022-04-27 | [12335](https://github.com/airbytehq/airbyte/pull/12335) | Adding fixtures to mock time.sleep for connectors that explicitly sleep | | `0.1.12` | 2022-03-31 | [11633](https://github.com/airbytehq/airbyte/pull/11633) | Increase unit test coverage | | `0.1.11` | 2022-04-04 | [11318](https://github.com/airbytehq/airbyte/pull/11318) | Change Response Reading | From 1021428998968a4176a6c2ed5d006ac1db423223 Mon Sep 17 00:00:00 2001 From: Alexandre Girard Date: Mon, 2 May 2022 17:45:36 -0700 Subject: [PATCH 056/152] Destination Bigquery: Update `Dataset location` field description (#12528) * Update field description * reset * Bump * reset * auto-bump connector version * auto-bump connector version Co-authored-by: Octavia Squidington III --- .../src/main/resources/seed/destination_definitions.yaml | 4 ++-- .../init/src/main/resources/seed/destination_specs.yaml | 7 +++---- .../destination-bigquery-denormalized/Dockerfile | 2 +- .../connectors/destination-bigquery/Dockerfile | 2 +- .../destination-bigquery/src/main/resources/spec.json | 2 +- docs/integrations/destinations/bigquery.md | 6 ++++-- 6 files changed, 12 insertions(+), 11 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index c191692f1aeb32..0775676acb8eda 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -24,7 +24,7 @@ - name: BigQuery destinationDefinitionId: 22f6c74f-5699-40ff-833c-4a879ea40133 dockerRepository: airbyte/destination-bigquery - dockerImageTag: 1.1.2 + dockerImageTag: 1.1.3 documentationUrl: https://docs.airbyte.io/integrations/destinations/bigquery icon: bigquery.svg resourceRequirements: @@ -36,7 +36,7 @@ - name: BigQuery (denormalized typed struct) destinationDefinitionId: 079d5540-f236-4294-ba7c-ade8fd918496 dockerRepository: airbyte/destination-bigquery-denormalized - dockerImageTag: 0.3.2 + dockerImageTag: 0.3.3 documentationUrl: https://docs.airbyte.io/integrations/destinations/bigquery icon: bigquery.svg resourceRequirements: diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index 144375d382e74e..7eb63ea62bae70 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -285,7 +285,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-bigquery:1.1.2" +- dockerImage: "airbyte/destination-bigquery:1.1.3" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" connectionSpecification: @@ -321,8 +321,7 @@ dataset_location: type: "string" description: "The location of 
the dataset. Warning: Changes made after creation\ - \ will not be applied. The default \"US\" value is used if not set explicitly.\ - \ Read more here." title: "Dataset Location" enum: @@ -495,7 +494,7 @@ - "overwrite" - "append" - "append_dedup" -- dockerImage: "airbyte/destination-bigquery-denormalized:0.3.2" +- dockerImage: "airbyte/destination-bigquery-denormalized:0.3.3" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" connectionSpecification: diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile index c1a1ad24403bda..3ef5865c9d0e1e 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile +++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile @@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.3.2 +LABEL io.airbyte.version=0.3.3 LABEL io.airbyte.name=airbyte/destination-bigquery-denormalized diff --git a/airbyte-integrations/connectors/destination-bigquery/Dockerfile b/airbyte-integrations/connectors/destination-bigquery/Dockerfile index 0804008a69dad3..403fd94981c33b 100644 --- a/airbyte-integrations/connectors/destination-bigquery/Dockerfile +++ b/airbyte-integrations/connectors/destination-bigquery/Dockerfile @@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=1.1.2 +LABEL io.airbyte.version=1.1.3 LABEL io.airbyte.name=airbyte/destination-bigquery diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-bigquery/src/main/resources/spec.json index 3195be26c9d5ec..b51812ee0d7b0e 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/resources/spec.json @@ -27,7 +27,7 @@ }, "dataset_location": { "type": "string", - "description": "The location of the dataset. Warning: Changes made after creation will not be applied. The default \"US\" value is used if not set explicitly. Read more here.", + "description": "The location of the dataset. Warning: Changes made after creation will not be applied. Read more here.", "title": "Dataset Location", "enum": [ "US", diff --git a/docs/integrations/destinations/bigquery.md b/docs/integrations/destinations/bigquery.md index ad4a7deefcf912..68d90a3e254baa 100644 --- a/docs/integrations/destinations/bigquery.md +++ b/docs/integrations/destinations/bigquery.md @@ -209,6 +209,7 @@ This uploads data directly from your source to BigQuery. While this is faster to | Version | Date | Pull Request | Subject | |:--------|:-----------| :--- |:--------------------------------------------------------------------------------------------| +| 1.1.3 | 2022-05-02 | [12528](https://github.com/airbytehq/airbyte/pull/12528/) | Update Dataset location field description | | 1.1.2 | 2022-04-29 | [12477](https://github.com/airbytehq/airbyte/pull/12477) | Dataset location is a required field | | 1.1.1 | 2022-04-15 | [12068](https://github.com/airbytehq/airbyte/pull/12068) | Fixed bug with GCS bucket conditional binding | | 1.1.0 | 2022-04-06 | [11776](https://github.com/airbytehq/airbyte/pull/11776) | Use serialized buffering strategy to reduce memory consumption. | @@ -231,9 +232,9 @@ This uploads data directly from your source to BigQuery. 
While this is faster to | 0.4.1 | 2021-10-04 | [\#6733](https://github.com/airbytehq/airbyte/issues/6733) | Support dataset starting with numbers | | 0.4.0 | 2021-08-26 | [\#5296](https://github.com/airbytehq/airbyte/issues/5296) | Added GCS Staging uploading option | | 0.3.12 | 2021-08-03 | [\#3549](https://github.com/airbytehq/airbyte/issues/3549) | Add optional arg to make a possibility to change the BigQuery client's chunk\buffer size | -| 0.3.11 | 2021-07-30 | [\#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | +| 0.3.11 | 2021-07-30 | [\#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | | 0.3.10 | 2021-07-28 | [\#3549](https://github.com/airbytehq/airbyte/issues/3549) | Add extended logs and made JobId filled with region and projectId | -| 0.3.9 | 2021-07-28 | [\#5026](https://github.com/airbytehq/airbyte/pull/5026) | Add sanitized json fields in raw tables to handle quotes in column names | +| 0.3.9 | 2021-07-28 | [\#5026](https://github.com/airbytehq/airbyte/pull/5026) | Add sanitized json fields in raw tables to handle quotes in column names | | 0.3.6 | 2021-06-18 | [\#3947](https://github.com/airbytehq/airbyte/issues/3947) | Service account credentials are now optional. | | 0.3.4 | 2021-06-07 | [\#3277](https://github.com/airbytehq/airbyte/issues/3277) | Add dataset location option | @@ -241,6 +242,7 @@ This uploads data directly from your source to BigQuery. While this is faster to | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------| :--- | +| 0.3.3 | 2022-05-02 | [12528](https://github.com/airbytehq/airbyte/pull/12528/) | Update Dataset location field description | | 0.3.2 | 2022-04-29 | [12477](https://github.com/airbytehq/airbyte/pull/12477) | Dataset location is a required field | | 0.3.1 | 2022-04-15 | [11978](https://github.com/airbytehq/airbyte/pull/11978) | Fixed emittedAt timestamp. | | 0.3.0 | 2022-04-06 | [11776](https://github.com/airbytehq/airbyte/pull/11776) | Use serialized buffering strategy to reduce memory consumption. | From ce1936e491fd148fca7b4b4d70e89811ca077200 Mon Sep 17 00:00:00 2001 From: Alexandre Girard Date: Mon, 2 May 2022 17:45:50 -0700 Subject: [PATCH 057/152] Source Hubspot - Log instructions to update scopes when hitting 403 HTTP error (#12515) * read workflows * try except * Revert "read workflows" This reverts commit 86db8ff850ab615793494b20649445a2e67fe983. 
* Check campaigns * Check by reading data * Check all streams * requests_mock.ANY * mock all http methods * Try with get and post * reset to master * log name * bump version --- .../connectors/source-hubspot/Dockerfile | 2 +- .../source-hubspot/source_hubspot/streams.py | 61 +++++++++++-------- docs/integrations/sources/hubspot.md | 11 ++-- 3 files changed, 42 insertions(+), 32 deletions(-) diff --git a/airbyte-integrations/connectors/source-hubspot/Dockerfile b/airbyte-integrations/connectors/source-hubspot/Dockerfile index 504b0f5d2164d4..c00e724d30857e 100644 --- a/airbyte-integrations/connectors/source-hubspot/Dockerfile +++ b/airbyte-integrations/connectors/source-hubspot/Dockerfile @@ -34,5 +34,5 @@ COPY source_hubspot ./source_hubspot ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.55 +LABEL io.airbyte.version=0.1.56 LABEL io.airbyte.name=airbyte/source-hubspot diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py b/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py index 57221ba6d0dfc8..764618dd229877 100644 --- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py +++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py @@ -331,32 +331,41 @@ def read_records( pagination_complete = False next_page_token = None - with AirbyteSentry.start_transaction("read_records", self.name), AirbyteSentry.start_transaction_span("read_records"): - while not pagination_complete: - - properties_list = list(self.properties.keys()) - if properties_list: - stream_records, response = self._read_stream_records( - properties_list=properties_list, - stream_slice=stream_slice, - stream_state=stream_state, - next_page_token=next_page_token, - ) - records = [value for key, value in stream_records.items()] - else: - response = self.handle_request(stream_slice=stream_slice, stream_state=stream_state, next_page_token=next_page_token) - records = self._transform(self.parse_response(response, stream_state=stream_state, stream_slice=stream_slice)) - - if self.filter_old_records: - records = self._filter_old_records(records) - yield from records - - next_page_token = self.next_page_token(response) - if not next_page_token: - pagination_complete = True - - # Always return an empty generator just in case no records were ever yielded - yield from [] + try: + with AirbyteSentry.start_transaction("read_records", self.name), AirbyteSentry.start_transaction_span("read_records"): + while not pagination_complete: + + properties_list = list(self.properties.keys()) + if properties_list: + stream_records, response = self._read_stream_records( + properties_list=properties_list, + stream_slice=stream_slice, + stream_state=stream_state, + next_page_token=next_page_token, + ) + records = [value for key, value in stream_records.items()] + else: + response = self.handle_request( + stream_slice=stream_slice, stream_state=stream_state, next_page_token=next_page_token + ) + records = self._transform(self.parse_response(response, stream_state=stream_state, stream_slice=stream_slice)) + + if self.filter_old_records: + records = self._filter_old_records(records) + yield from records + + next_page_token = self.next_page_token(response) + if not next_page_token: + pagination_complete = True + + # Always return an empty generator just in case no records were ever yielded + yield from [] + except requests.exceptions.HTTPError as e: + 
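+            # (Editor's note, added as Python comments so this hunk still
+            # reads as valid code.) HubSpot responds with HTTP 403 when the
+            # token lacks a scope a stream requires; rather than surface a
+            # bare HTTPError, the connector re-raises a RuntimeError naming
+            # the stream so users know which scopes to authorize.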
status_code = e.response.status_code
+            if status_code == 403:
+                raise RuntimeError(f"Invalid permissions for {self.name}. Please ensure that all required scopes are authorized.")
+            else:
+                raise e

     @staticmethod
     def _convert_datetime_to_string(dt: pendulum.datetime, declared_format: str = None) -> str:
diff --git a/docs/integrations/sources/hubspot.md b/docs/integrations/sources/hubspot.md
index 353218e67d6142..b4e38c7966a65f 100644
--- a/docs/integrations/sources/hubspot.md
+++ b/docs/integrations/sources/hubspot.md
@@ -147,12 +147,13 @@ If you are using OAuth, most of the streams require the appropriate [scopes](htt
 | Version | Date | Pull Request | Subject |
 |:--------|:-----------|:---------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------|
-| 0.1.55 | 2022-04-28 | [12424](https://github.com/airbytehq/airbyte/pull/12424) | Correct schema for ticket_pipeline stream |
-| 0.1.54 | 2022-04-28 | [12335](https://github.com/airbytehq/airbyte/pull/12335) | Mock time slep in unit test s |
-| 0.1.53 | 2022-04-20 | [12230](https://github.com/airbytehq/airbyte/pull/12230) | chaneg spec json to yaml format |
+| 0.1.56 | 2022-05-02 | [12515](https://github.com/airbytehq/airbyte/pull/12515) | Extra logs for troubleshooting 403 errors |
+| 0.1.55 | 2022-04-28 | [12424](https://github.com/airbytehq/airbyte/pull/12424) | Correct schema for ticket_pipeline stream |
+| 0.1.54 | 2022-04-28 | [12335](https://github.com/airbytehq/airbyte/pull/12335) | Mock time sleep in unit tests |
+| 0.1.53 | 2022-04-20 | [12230](https://github.com/airbytehq/airbyte/pull/12230) | Change spec JSON to YAML format |
 | 0.1.52 | 2022-03-25 | [11423](https://github.com/airbytehq/airbyte/pull/11423) | Add tickets associations to engagements streams |
-| 0.1.51 | 2022-03-24 | [11321](https://github.com/airbytehq/airbyte/pull/11321) | Fix updated at field non exists issue |
-| 0.1.50 | 2022-03-22 | [11266](https://github.com/airbytehq/airbyte/pull/11266) | Fix Engagements Stream Pagination |
+| 0.1.51 | 2022-03-24 | [11321](https://github.com/airbytehq/airbyte/pull/11321) | Fix updated at field non exists issue |
+| 0.1.50 | 2022-03-22 | [11266](https://github.com/airbytehq/airbyte/pull/11266) | Fix Engagements Stream Pagination |
 | 0.1.49 | 2022-03-17 | [11218](https://github.com/airbytehq/airbyte/pull/11218) | Anchor hyperlink in input configuration |
 | 0.1.48 | 2022-03-16 | [11105](https://github.com/airbytehq/airbyte/pull/11105) | Fix float numbers, upd docs |
 | 0.1.47 | 2022-03-15 | [11121](https://github.com/airbytehq/airbyte/pull/11121) | Add partition keys where appropriate |

From c570225acec6eac062ada910a0f1d93b28ac6901 Mon Sep 17 00:00:00 2001
From: Davin Chia
Date: Tue, 3 May 2022 17:21:37 +0800
Subject: [PATCH 058/152] Make job docker container name meaningful. (#12503)

Closes #9584. Job containers are now named following the Kubernetes-style convention of <image-name>-<job-id>-<attempt>. This should make things easier for OSS users doing on-call for Docker deployments.

This mirrors the Kubernetes name convention: https://github.com/airbytehq/airbyte/blob/master/airbyte-workers/src/main/java/io/airbyte/workers/process/KubeProcessFactory.java#L180

We can probably reuse the naming functions across both processes. We are going to make more changes in this area to improve ergonomics (e.g. instead of always using "sync", we want to replace it with the actual operation: discover, check connection, etc.), so we'll do the broader refactoring in that PR.
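As a quick reference, here is a rough Python rendering of the naming rule the Java diff below implements. Only the <image-name>-sync-<job-id>-<attempt>-<random-suffix> layout, the 128-character cap, and the trim-from-the-front behaviour are taken from that diff; the helper name and everything else are illustrative.

    import random
    import string

    def container_name(image: str, job_id: str, attempt: int, limit: int = 128) -> str:
        # "airbyte/source-gitlab:0.1.5" -> "source-gitlab": drop the version
        # tag, then keep only the last path segment of the image name.
        image_name = image.split(":")[0].split("/")[-1]
        rand = "".join(random.choices(string.ascii_lowercase, k=5))
        name = f"{image_name}-sync-{job_id}-{attempt}-{rand}"
        if len(name) > limit:
            # Trim the front of the image name so the informative suffix
            # (job id, attempt, random tail) always survives.
            image_name = image_name[len(name) - limit:]
            name = f"{image_name}-sync-{job_id}-{attempt}-{rand}"
        return name

    # e.g. container_name("airbyte/source-gitlab:0.1.5", "123", 0)
    #   -> "source-gitlab-sync-123-0-abcde" (random tail varies)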
I want to unblock this for the time being, since open source users are having difficulty debugging right now.
---
 .../workers/process/DockerProcessFactory.java | 26 +++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java b/airbyte-workers/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java
index e40f884cf93875..61c33bd6db3810 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java
@@ -23,12 +23,15 @@
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 public class DockerProcessFactory implements ProcessFactory {

   private static final Logger LOGGER = LoggerFactory.getLogger(DockerProcessFactory.class);
+  private static final String VERSION_DELIMITER = ":";
+  private static final String DOCKER_DELIMITER = "/";

   private static final Path DATA_MOUNT_DESTINATION = Path.of("/data");
   private static final Path LOCAL_MOUNT_DESTINATION = Path.of("/local");
@@ -114,6 +117,9 @@ public Process create(final String jobId,
           rebasePath(jobRoot).toString(), // rebases the job root on the job data mount
           "--log-driver",
           "none");
+      final String containerName = createContainerName(imageName, jobId, attempt);
+      cmd.add("--name");
+      cmd.add(containerName);

       if (networkName != null) {
         cmd.add("--network");
@@ -163,6 +169,26 @@ public Process create(final String jobId,
     }
   }

+  private static String createContainerName(final String fullImagePath, final String jobId, final int attempt) {
+    final var noVersion = fullImagePath.split(VERSION_DELIMITER)[0];
+
+    final var nameParts = noVersion.split(DOCKER_DELIMITER);
+    var imageName = nameParts[nameParts.length - 1];
+
+    final var randSuffix = RandomStringUtils.randomAlphabetic(5).toLowerCase();
+    final String suffix = "sync" + "-" + jobId + "-" + attempt + "-" + randSuffix;
+
+    var podName = imageName + "-" + suffix;
+    final var podNameLenLimit = 128;
+    if (podName.length() > podNameLenLimit) {
+      final var extra = podName.length() - podNameLenLimit;
+      imageName = imageName.substring(extra);
+      podName = imageName + "-" + suffix;
+    }
+
+    return podName;
+  }
+
   private Path rebasePath(final Path jobRoot) {
     final Path relativePath = workspaceRoot.relativize(jobRoot);
     return DATA_MOUNT_DESTINATION.resolve(relativePath);

From 3ab08994ac56a8e5bb79e6911c3ff517001b7b56 Mon Sep 17 00:00:00 2001
From: oneshcheret <33333155+sashaNeshcheret@users.noreply.github.com>
Date: Tue, 3 May 2022 13:42:06 +0300
Subject: [PATCH 059/152] S3 destination: updating specs regarding certification (#11917)

* S3 destination: updating specs regarding certification
* S3 destination: updating required fields
* S3 destination: updating required fields
* Apply suggestions from code review

Co-authored-by: Andy
* S3 destination: updating specs with links
* S3 destination: updating specs with links
* Apply suggestions from code review

Co-authored-by: Andy
* S3 destination: added links to the fields
* S3 destination: updating specs with links

Co-authored-by: Andy
---
 .../src/main/resources/spec.json | 121 ++++++++++--------
 1 file changed, 65 insertions(+), 56 deletions(-)

diff --git a/airbyte-integrations/connectors/destination-s3/src/main/resources/spec.json
b/airbyte-integrations/connectors/destination-s3/src/main/resources/spec.json index 762f220cf6a3c2..9f66df48798c03 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-s3/src/main/resources/spec.json @@ -16,40 +16,41 @@ ], "additionalProperties": false, "properties": { - "s3_endpoint": { - "title": "Endpoint", + "access_key_id": { "type": "string", - "default": "", - "description": "This is your S3 endpoint url.(if you are working with AWS S3, just leave empty).", - "examples": ["http://localhost:9000"], + "description": "The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) on how to generate an access key.", + "title": "S3 Key ID *", + "airbyte_secret": true, + "examples": ["A012345678910EXAMPLE"], "order": 0 }, - "s3_bucket_name": { - "title": "S3 Bucket Name", + "secret_access_key": { "type": "string", - "description": "The name of the S3 bucket.", - "examples": ["airbyte_sync"], + "description": "The corresponding secret to the access key ID. See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys)", + "title": "S3 Access Key *", + "airbyte_secret": true, + "examples": ["a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"], "order": 1 }, - "s3_bucket_path": { - "description": "Directory under the S3 bucket where data will be written.", + "s3_bucket_name": { + "title": "S3 Bucket Name *", "type": "string", - "examples": ["data_sync/test"], + "description": "The name of the S3 bucket. See [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) to create an S3 bucket.", + "examples": ["airbyte_sync"], "order": 2 }, - "s3_path_format": { - "description": "Format string on how data will be organized inside the S3 bucket directory", + "s3_bucket_path": { + "title": "S3 Bucket Path *", + "description": "Directory under the S3 bucket where data will be written. See [this](https://docs.airbyte.com/integrations/destinations/s3#:~:text=to%20format%20the-,bucket%20path,-%3A)", "type": "string", - "examples": [ - "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" - ], + "examples": ["data_sync/test"], "order": 3 }, "s3_bucket_region": { - "title": "S3 Bucket Region", + "title": "S3 Bucket Region *", "type": "string", "default": "", - "description": "The region of the S3 bucket.", + "description": "The region of the S3 bucket. See [this](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-regions-availability-zones.html#concepts-available-regions) for all region codes.", "enum": [ "", "us-east-1", @@ -80,43 +81,29 @@ ], "order": 4 }, - "access_key_id": { - "type": "string", - "description": "The access key id to access the S3 bucket. 
Airbyte requires Read and Write permissions to the given bucket, if not set, Airbyte will rely on Instance Profile.", - "title": "S3 Key Id", - "airbyte_secret": true, - "examples": ["A012345678910EXAMPLE"], - "order": 5 - }, - "secret_access_key": { - "type": "string", - "description": "The corresponding secret to the access key id, if S3 Key Id is set, then S3 Access Key must also be provided", - "title": "S3 Access Key", - "airbyte_secret": true, - "examples": ["a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"], - "order": 6 - }, "format": { - "title": "Output Format", + "title": "Output Format *", "type": "object", - "description": "Output data format", + "description": "Format of the data output. See [this](https://docs.airbyte.com/integrations/destinations/s3/#output-schema) for more details", "oneOf": [ { "title": "Avro: Apache Avro", "required": ["format_type", "compression_codec"], "properties": { "format_type": { + "title": "Format Type *", "type": "string", "enum": ["Avro"], - "default": "Avro" + "default": "Avro", + "order": 0 }, "compression_codec": { - "title": "Compression Codec", + "title": "Compression Codec *", "description": "The compression algorithm used to compress data. Default to no compression.", "type": "object", "oneOf": [ { - "title": "no compression", + "title": "No Compression", "required": ["codec"], "properties": { "codec": { @@ -136,7 +123,7 @@ "default": "Deflate" }, "compression_level": { - "title": "Deflate level", + "title": "Deflate Level", "description": "0: no compression & fastest, 9: best compression & slowest.", "type": "integer", "default": 0, @@ -166,7 +153,7 @@ "default": "xz" }, "compression_level": { - "title": "Compression level", + "title": "Compression Level", "description": "See here for details.", "type": "integer", "default": 6, @@ -185,7 +172,7 @@ "default": "zstandard" }, "compression_level": { - "title": "Compression level", + "title": "Compression Level", "description": "Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.", "type": "integer", "default": 3, @@ -193,7 +180,7 @@ "maximum": 22 }, "include_checksum": { - "title": "Include checksum", + "title": "Include Checksum", "description": "If true, include a checksum with each data block.", "type": "boolean", "default": false @@ -211,14 +198,16 @@ } } } - ] + ], + "order": 1 }, "part_size_mb": { - "title": "Block Size (MB) for Amazon S3 multipart upload", + "title": "Block Size (MB) for Amazon S3 multipart upload (Optional)", "description": "This is the size of a \"Part\" being buffered in memory. It limits the memory usage when writing. Larger values will allow to upload a bigger files and improve the speed, but consumes9 more memory. Allowed values: min=5MB, max=525MB Default: 5MB.", "type": "integer", "default": 5, - "examples": [5] + "examples": [5], + "order": 2 } } }, @@ -227,6 +216,7 @@ "required": ["format_type", "flattening"], "properties": { "format_type": { + "title": "Format Type *", "type": "string", "enum": ["CSV"], "default": "CSV" @@ -239,7 +229,7 @@ "enum": ["No flattening", "Root level flattening"] }, "part_size_mb": { - "title": "Block Size (MB) for Amazon S3 multipart upload", + "title": "Block Size (MB) For Amazon S3 Multipart Upload (Optional)", "description": "This is the size of a \"Part\" being buffered in memory. It limits the memory usage when writing. Larger values will allow to upload a bigger files and improve the speed, but consumes9 more memory. 
Allowed values: min=5MB, max=525MB Default: 5MB.", "type": "integer", "default": 5, @@ -277,16 +267,17 @@ } }, { - "title": "JSON Lines: newline-delimited JSON", + "title": "JSON Lines: Newline-delimited JSON", "required": ["format_type"], "properties": { "format_type": { + "title": "Format Type *", "type": "string", "enum": ["JSONL"], "default": "JSONL" }, "part_size_mb": { - "title": "Block Size (MB) for Amazon S3 multipart upload", + "title": "Block Size (MB) For Amazon S3 Multipart Upload (Optional)", "description": "This is the size of a \"Part\" being buffered in memory. It limits the memory usage when writing. Larger values will allow to upload a bigger files and improve the speed, but consumes9 more memory. Allowed values: min=5MB, max=525MB Default: 5MB.", "type": "integer", "default": 5, @@ -328,12 +319,13 @@ "required": ["format_type"], "properties": { "format_type": { + "title": "Format Type *", "type": "string", "enum": ["Parquet"], "default": "Parquet" }, "compression_codec": { - "title": "Compression Codec", + "title": "Compression Codec (Optional)", "description": "The compression algorithm used to compress data pages.", "type": "string", "enum": [ @@ -348,35 +340,35 @@ "default": "UNCOMPRESSED" }, "block_size_mb": { - "title": "Block Size (Row Group Size) (MB)", + "title": "Block Size (Row Group Size) (MB) (Optional)", "description": "This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.", "type": "integer", "default": 128, "examples": [128] }, "max_padding_size_mb": { - "title": "Max Padding Size (MB)", + "title": "Max Padding Size (MB) (Optional)", "description": "Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.", "type": "integer", "default": 8, "examples": [8] }, "page_size_kb": { - "title": "Page Size (KB)", + "title": "Page Size (KB) (Optional)", "description": "The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.", "type": "integer", "default": 1024, "examples": [1024] }, "dictionary_page_size_kb": { - "title": "Dictionary Page Size (KB)", + "title": "Dictionary Page Size (KB) (Optional)", "description": "There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.", "type": "integer", "default": 1024, "examples": [1024] }, "dictionary_encoding": { - "title": "Dictionary Encoding", + "title": "Dictionary Encoding (Optional)", "description": "Default: true.", "type": "boolean", "default": true @@ -384,6 +376,23 @@ } } ], + "order": 5 + }, + "s3_endpoint": { + "title": "Endpoint (Optional)", + "type": "string", + "default": "", + "description": "This is your S3 endpoint url. (If you are working with AWS S3, you can leave blank). See [this](https://docs.aws.amazon.com/general/latest/gr/s3.html#:~:text=Service%20endpoints-,Amazon%20S3%20endpoints,-When%20you%20use)", + "examples": ["http://localhost:9000"], + "order": 6 + }, + "s3_path_format": { + "title": "S3 Path Format (Optional)", + "description": "Format string on how data will be organized inside the S3 bucket directory. 
See [this](https://docs.airbyte.com/integrations/destinations/s3#:~:text=The%20full%20path%20of%20the%20output%20data%20with%20the%20default%20S3%20path%20format)", + "type": "string", + "examples": [ + "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" + ], "order": 7 } } From 9ffd5bb0e8c938b34068b6d2b88cd3e55c7739c4 Mon Sep 17 00:00:00 2001 From: George Claireaux Date: Tue, 3 May 2022 12:32:20 +0100 Subject: [PATCH 060/152] Source Facebook Marketing: Attempt to retry failing jobs that are already split to minimum size (#12390) * restart jobs that are already split to smallest size * manager now fails on nested jobs hitting max attempts * version bump * auto-bump connector version Co-authored-by: Octavia Squidington III --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 2 +- .../source-facebook-marketing/Dockerfile | 2 +- .../streams/async_job.py | 10 ++++++-- .../streams/async_job_manager.py | 6 +++++ .../unit_tests/test_async_job.py | 16 ++++++++++++- .../unit_tests/test_async_job_manager.py | 23 +++++++++++++++++++ .../sources/facebook-marketing.md | 1 + 8 files changed, 56 insertions(+), 6 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 296d70636390b0..a9b096d3782cfc 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -210,7 +210,7 @@ - name: Facebook Marketing sourceDefinitionId: e7778cfc-e97c-4458-9ecb-b4f2bba8946c dockerRepository: airbyte/source-facebook-marketing - dockerImageTag: 0.2.44 + dockerImageTag: 0.2.45 documentationUrl: https://docs.airbyte.io/integrations/sources/facebook-marketing icon: facebook.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 9f925c519434d0..aafa7185f3cd2a 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -1748,7 +1748,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-facebook-marketing:0.2.44" +- dockerImage: "airbyte/source-facebook-marketing:0.2.45" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/facebook-marketing" changelogUrl: "https://docs.airbyte.io/integrations/sources/facebook-marketing" diff --git a/airbyte-integrations/connectors/source-facebook-marketing/Dockerfile b/airbyte-integrations/connectors/source-facebook-marketing/Dockerfile index 57484775451d54..dbf4bac16a1071 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/Dockerfile +++ b/airbyte-integrations/connectors/source-facebook-marketing/Dockerfile @@ -13,5 +13,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.44 +LABEL io.airbyte.version=0.2.45 LABEL io.airbyte.name=airbyte/source-facebook-marketing diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/async_job.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/async_job.py index 195fb1d1e84dbf..c27702cf09ae22 100644 --- 
a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/async_job.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/async_job.py @@ -158,7 +158,13 @@ def split_job(self) -> List["AsyncJob"]: new_jobs = [] for job in self._jobs: if job.failed: - new_jobs.extend(job.split_job()) + try: + new_jobs.extend(job.split_job()) + except ValueError as split_limit_error: + logger.error(split_limit_error) + logger.info(f'can\'t split "{job}" any smaller, attempting to retry the job.') + job.restart() + new_jobs.append(job) else: new_jobs.append(job) return new_jobs @@ -202,7 +208,7 @@ def split_job(self) -> List["AsyncJob"]: return self._split_by_edge_class(AdSet) elif isinstance(self._edge_object, AdSet): return self._split_by_edge_class(Ad) - raise RuntimeError("The job is already splitted to the smallest size.") + raise ValueError("The job is already splitted to the smallest size.") def _split_by_edge_class(self, edge_class: Union[Type[Campaign], Type[AdSet], Type[Ad]]) -> List[AsyncJob]: """Split insight job by creating insight jobs from lower edge object, i.e. diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/async_job_manager.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/async_job_manager.py index 85cae8307ab36e..a24d2deac0bb62 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/async_job_manager.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/async_job_manager.py @@ -94,6 +94,12 @@ def _check_jobs_status_and_restart(self) -> List[AsyncJob]: self._wait_throttle_limit_down() for job in self._running_jobs: if job.failed: + if isinstance(job, ParentAsyncJob): + # if this job is a ParentAsyncJob, it holds X number of jobs + # we want to check that none of these nested jobs have exceeded MAX_NUMBER_OF_ATTEMPTS + for nested_job in job._jobs: + if nested_job.attempt_number >= self.MAX_NUMBER_OF_ATTEMPTS: + raise JobException(f"{nested_job}: failed more than {self.MAX_NUMBER_OF_ATTEMPTS} times. Terminating...") if job.attempt_number >= self.MAX_NUMBER_OF_ATTEMPTS: raise JobException(f"{job}: failed more than {self.MAX_NUMBER_OF_ATTEMPTS} times. 
Terminating...") elif job.attempt_number == 2: diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job.py index 6d6e14c135c9be..9ea4581f17d6b0 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job.py @@ -336,7 +336,7 @@ def test_split_job_smallest(self, mocker, api): params = {"time_increment": 1, "breakdowns": []} job = InsightAsyncJob(api=api, edge_object=Ad(1), interval=interval, params=params) - with pytest.raises(RuntimeError, match="The job is already splitted to the smallest size."): + with pytest.raises(ValueError, match="The job is already splitted to the smallest size."): job.split_job() @@ -415,5 +415,19 @@ def test_split_job(self, parent_job, grouped_jobs, mocker): else: job.split_job.assert_not_called() + def test_split_job_smallest(self, parent_job, grouped_jobs): + grouped_jobs[0].failed = True + grouped_jobs[0].split_job.side_effect = ValueError("Mocking smallest size") + + # arbitrarily testing this X times, the max attempts is handled by async_job_manager rather than the job itself. + count = 0 + while count < 10: + split_jobs = parent_job.split_job() + assert len(split_jobs) == len( + grouped_jobs + ), "attempted to split job at smallest size so should just restart job meaning same no. of jobs" + grouped_jobs[0].attempt_number += 1 + count += 1 + def test_str(self, parent_job, grouped_jobs): assert str(parent_job) == f"ParentAsyncJob({grouped_jobs[0]} ... {len(grouped_jobs) - 1} jobs more)" diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job_manager.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job_manager.py index ea3e19a7e2d17a..2d3b7ca3e75679 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job_manager.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job_manager.py @@ -151,3 +151,26 @@ def update_job_behaviour(): with pytest.raises(JobException, match=f"{jobs[1]}: failed more than {InsightAsyncJobManager.MAX_NUMBER_OF_ATTEMPTS} times."): next(manager.completed_jobs(), None) + + def test_nested_job_failed_too_many_times(self, api, mocker, time_mock, update_job_mock): + """Manager should fail when a nested job within a ParentAsyncJob failed too many times""" + + def update_job_behaviour(): + jobs[1].failed = True + sub_jobs[1].failed = True + sub_jobs[1].attempt_number = InsightAsyncJobManager.MAX_NUMBER_OF_ATTEMPTS + yield from range(10) + + update_job_mock.side_effect = update_job_behaviour() + sub_jobs = [ + mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True), + mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False), + ] + jobs = [ + mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True), + mocker.Mock(spec=ParentAsyncJob, _jobs=sub_jobs, attempt_number=1, failed=False, completed=False), + ] + manager = InsightAsyncJobManager(api=api, jobs=jobs) + + with pytest.raises(JobException): + next(manager.completed_jobs(), None) diff --git a/docs/integrations/sources/facebook-marketing.md b/docs/integrations/sources/facebook-marketing.md index ddcadaf1627d83..98bd82243c0f11 100644 --- a/docs/integrations/sources/facebook-marketing.md +++ 
b/docs/integrations/sources/facebook-marketing.md @@ -108,6 +108,7 @@ For more information, see the [Facebook Insights API documentation.](https://dev | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 0.2.45 | 2022-05-03 | [12390](https://github.com/airbytehq/airbyte/pull/12390) | Better retry logic for split-up async jobs | | 0.2.44 | 2022-04-14 | [11751](https://github.com/airbytehq/airbyte/pull/11751) | Update API to a directly initialise an AdAccount with the given ID | | 0.2.43 | 2022-04-13 | [11801](https://github.com/airbytehq/airbyte/pull/11801) | Fix `user_tos_accepted` schema to be an object | 0.2.42 | 2022-04-06 | [11761](https://github.com/airbytehq/airbyte/pull/11761) | Upgrade Facebook Python SDK to version 13| From 51be2fcc32dfa3c7fcb5a893ae45c3c7e929e110 Mon Sep 17 00:00:00 2001 From: Martin Morset <41419288+dingobar@users.noreply.github.com> Date: Tue, 3 May 2022 14:46:56 +0200 Subject: [PATCH 061/152] helm: Remove unused PVC resources (#12387) --- charts/airbyte/Chart.yaml | 2 +- charts/airbyte/README.md | 73 ++++++++++++------- .../airbyte/templates/server/deployment.yaml | 9 --- charts/airbyte/templates/server/pvc-data.yaml | 14 ---- charts/airbyte/values.yaml | 24 +----- 5 files changed, 51 insertions(+), 71 deletions(-) delete mode 100644 charts/airbyte/templates/server/pvc-data.yaml diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index fbcf3833152504..ccbd47b7fcbabd 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.3.1 +version: 0.3.2 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index 22c78bede3fa54..7dd042a6f06bcb 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -6,10 +6,9 @@ Helm charts for Airbyte. ### Global Parameters -| Name | Description | Value | -| ---------------------- | -------------------------------------------- | ----- | -| `global.imageRegistry` | Global Docker image registry | `""` | -| `global.storageClass` | Global StorageClass for Persistent Volume(s) | `""` | +| Name | Description | Value | +| ---------------------- | ---------------------------- | ----- | +| `global.imageRegistry` | Global Docker image registry | `""` | ### Common Parameters @@ -31,7 +30,7 @@ Helm charts for Airbyte. | `webapp.replicaCount` | Number of webapp replicas | `1` | | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` | | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` | -| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.36.6-alpha` | +| `webapp.image.tag` | The airbyte webapp image tag. 
Defaults to the chart's AppVersion | `0.36.6-alpha` | | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` | | `webapp.containerSecurityContext` | Security context for the container | `{}` | | `webapp.livenessProbe.enabled` | Enable livenessProbe on the webapp | `true` | @@ -68,19 +67,35 @@ Helm charts for Airbyte. ### Scheduler Parameters -| Name | Description | Value | -| ------------------------------ | ------------------------------------------------------------------- | ------------------- | -| `scheduler.replicaCount` | Number of scheduler replicas | `1` | -| `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` | -| `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | -| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.36.6-alpha` | -| `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | -| `scheduler.resources.limits` | The resources limits for the scheduler container | `{}` | -| `scheduler.resources.requests` | The requested resources for the scheduler container | `{}` | -| `scheduler.nodeSelector` | Node labels for pod assignment | `{}` | -| `scheduler.tolerations` | Tolerations for scheduler pod assignment. | `[]` | -| `scheduler.log.level` | The log level to log at. | `INFO` | -| `scheduler.extraEnv` | Additional env vars for scheduler pod(s). | `[]` | +| Name | Description | Value | +| ---------------------------------------------- | ------------------------------------------------------------------- | ------------------- | +| `scheduler.replicaCount` | Number of scheduler replicas | `1` | +| `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` | +| `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | +| `scheduler.image.tag` | The airbyte scheduler image tag. 
Defaults to the chart's AppVersion | `0.36.6-alpha` | +| `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | +| `scheduler.containerSecurityContext` | Security context for the container | `{}` | +| `scheduler.livenessProbe.enabled` | Enable livenessProbe on the scheduler | `true` | +| `scheduler.livenessProbe.initialDelaySeconds` | Initial delay seconds for livenessProbe | `5` | +| `scheduler.livenessProbe.periodSeconds` | Period seconds for livenessProbe | `30` | +| `scheduler.livenessProbe.timeoutSeconds` | Timeout seconds for livenessProbe | `1` | +| `scheduler.livenessProbe.failureThreshold` | Failure threshold for livenessProbe | `3` | +| `scheduler.livenessProbe.successThreshold` | Success threshold for livenessProbe | `1` | +| `scheduler.readinessProbe.enabled` | Enable readinessProbe on the scheduler | `true` | +| `scheduler.readinessProbe.initialDelaySeconds` | Initial delay seconds for readinessProbe | `5` | +| `scheduler.readinessProbe.periodSeconds` | Period seconds for readinessProbe | `30` | +| `scheduler.readinessProbe.timeoutSeconds` | Timeout seconds for readinessProbe | `1` | +| `scheduler.readinessProbe.failureThreshold` | Failure threshold for readinessProbe | `3` | +| `scheduler.readinessProbe.successThreshold` | Success threshold for readinessProbe | `1` | +| `scheduler.resources.limits` | The resources limits for the scheduler container | `{}` | +| `scheduler.resources.requests` | The requested resources for the scheduler container | `{}` | +| `scheduler.nodeSelector` | Node labels for pod assignment | `{}` | +| `scheduler.tolerations` | Tolerations for scheduler pod assignment. | `[]` | +| `scheduler.affinity` | Affinity and anti-affinity for scheduler pod assignment. | `{}` | +| `scheduler.log.level` | The log level to log at. | `INFO` | +| `scheduler.extraEnv` | Additional env vars for scheduler pod(s). | `[]` | +| `scheduler.extraVolumeMounts` | Additional volumeMounts for scheduler container(s). | `[]` | +| `scheduler.extraVolumes` | Additional volumes for scheduler pod(s). | `[]` | ### Pod Sweeper parameters @@ -139,9 +154,6 @@ Helm charts for Airbyte. | `server.resources.requests` | The requested resources for the server container | `{}` | | `server.service.type` | The service type to use for the API server | `ClusterIP` | | `server.service.port` | The service port to expose the API server on | `8001` | -| `server.persistence.accessMode` | The access mode for the airbyte server pvc | `ReadWriteOnce` | -| `server.persistence.size` | The size of the pvc to use for the airbyte server pvc | `1Gi` | -| `server.persistence.storageClass` | The storage class to use for the airbyte server pvc | `""` | | `server.nodeSelector` | Node labels for pod assignment | `{}` | | `server.tolerations` | Tolerations for server pod assignment. | `[]` | | `server.affinity` | Affinity and anti-affinity for server pod assignment. | `{}` | @@ -191,6 +203,9 @@ Helm charts for Airbyte. | `bootloader.image.repository` | The repository to use for the airbyte bootloader image. | `airbyte/bootloader` | | `bootloader.image.pullPolicy` | the pull policy to use for the airbyte bootloader image | `IfNotPresent` | | `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.36.6-alpha` | +| `bootloader.podAnnotations` | Add extra annotations to the bootloader pod | `{}` | +| `bootloader.nodeSelector` | Node labels for pod assignment | `{}` | +| `bootloader.tolerations` | Tolerations for worker pod assignment. 
| `[]` | ### Temporal parameters @@ -268,13 +283,17 @@ Helm charts for Airbyte. | `logs.gcs.credentials` | The path the GCS creds are written to | `""` | | `logs.gcs.credentialsJson` | Base64 encoded json GCP credentials file contents | `""` | + ### Minio chart overwrites -| Name | Description | Value | -| -------------------------- | -------------------------------- | ---------- | -| `minio.accessKey.password` | Minio Access Key | `minio` | -| `minio.secretKey.password` | Minio Secret Key | `minio123` | -| `jobs.resources.limits` | The resources limits for jobs | `{}` | -| `jobs.resources.requests` | The requested resources for jobs | `{}` | +| Name | Description | Value | +| -------------------------- | -------------------------------------------- | ---------- | +| `minio.accessKey.password` | Minio Access Key | `minio` | +| `minio.secretKey.password` | Minio Secret Key | `minio123` | +| `jobs.resources.limits` | The resources limits for jobs | `{}` | +| `jobs.resources.requests` | The requested resources for jobs | `{}` | +| `jobs.kube.annotations` | key/value annotations applied to kube jobs | `{}` | +| `jobs.kube.nodeSelector` | key/value node selector applied to kube jobs | `{}` | +| `jobs.kube.tolerations` | Tolerations for jobs.kube pod assignment. | `[]` | diff --git a/charts/airbyte/templates/server/deployment.yaml b/charts/airbyte/templates/server/deployment.yaml index 9d7fc164c75ab2..e93e308b4119ac 100644 --- a/charts/airbyte/templates/server/deployment.yaml +++ b/charts/airbyte/templates/server/deployment.yaml @@ -210,12 +210,6 @@ spec: securityContext: {{- toYaml .Values.server.containerSecurityContext | nindent 10 }} {{- end }} volumeMounts: - - name: airbyte-data - mountPath: /configs - subPath: configs - - name: airbyte-data - mountPath: /workspace - subPath: workspace - name: gcs-log-creds-volume mountPath: /secrets/gcs-log-creds readOnly: true @@ -223,9 +217,6 @@ spec: {{ toYaml .Values.server.extraVolumeMounts | nindent 8 }} {{- end }} volumes: - - name: airbyte-data - persistentVolumeClaim: - claimName: {{ include "common.names.fullname" . }}-data - name: gcs-log-creds-volume secret: secretName: {{ include "common.names.fullname" . }}-gcs-log-creds diff --git a/charts/airbyte/templates/server/pvc-data.yaml b/charts/airbyte/templates/server/pvc-data.yaml deleted file mode 100644 index 62ef1e3ccb3806..00000000000000 --- a/charts/airbyte/templates/server/pvc-data.yaml +++ /dev/null @@ -1,14 +0,0 @@ ---- -apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: {{ include "common.names.fullname" . 
}}-data - labels: - airbyte: volume-data -spec: - accessModes: - - {{ .Values.server.persistence.accessMode | quote }} - resources: - requests: - storage: {{ .Values.server.persistence.size | quote }} - {{- include "common.storage.class" (dict "persistence" .Values.server.persistence "global" .Values.global) | nindent 2 }} diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index ad5ec07f8fcb67..77826f4fba7138 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -1,11 +1,9 @@ ## @section Global Parameters ## @param global.imageRegistry Global Docker image registry -## @param global.storageClass Global StorageClass for Persistent Volume(s) ## global: imageRegistry: "" - storageClass: "" ## @section Common Parameters @@ -512,20 +510,6 @@ server: type: ClusterIP port: 8001 - ## @param server.persistence.accessMode The access mode for the airbyte server pvc - ## @param server.persistence.size The size of the pvc to use for the airbyte server pvc - persistence: - size: 1Gi - accessMode: ReadWriteOnce - ## @param server.persistence.storageClass The storage class to use for the airbyte server pvc - ## If defined, storageClassName: - ## If set to "-", storageClassName: "", which disables dynamic provisioning - ## If undefined (the default) or set to null, no storageClassName spec is - ## set, choosing the default provisioner. (gp2 on AWS, standard on - ## GKE, AWS & OpenStack) - ## - storageClass: "" - ## @param server.nodeSelector [object] Node labels for pod assignment ## Ref: https://kubernetes.io/docs/user-guide/node-selection/ ## @@ -922,8 +906,8 @@ logs: ## Google Cloud Storage (GCS) Log Location Configuration ## @param logs.gcs.bucket GCS bucket name - ## @param logs.gcs.credentials The path the GCS creds file is located - ## @param logs.gcs.credentialsJson Base64 encoded JSON credentials + ## @param logs.gcs.credentials The path the GCS creds are written to + ## @param logs.gcs.credentialsJson Base64 encoded json GCP credentials file contents gcs: bucket: "" # If you are mounting an existing secret to extraVolumes on scheduler, server and worker @@ -965,12 +949,12 @@ jobs: kube: ## JOB_KUBE_ANNOTATIONS ## pod annotations of the sync job and the default pod annotations fallback for others jobs - ## @params jobs.kube.annotations [object] key/value annotations applied to kube jobs + ## @param jobs.kube.annotations [object] key/value annotations applied to kube jobs annotations: {} ## JOB_KUBE_NODE_SELECTORS ## pod node selector of the sync job and the default pod node selector fallback for others jobs - ## @params jobs.kubeSelector [object] key/value node selector applied to kube jobs + ## @param jobs.kube.nodeSelector [object] key/value node selector applied to kube jobs nodeSelector: {} ## JOB_KUBE_TOLERATIONS From e6ee556f0c02b5c5fb8f096f79daed3c2a09e29d Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Tue, 3 May 2022 17:24:33 +0300 Subject: [PATCH 062/152] Source Google Analytics: always sync data from two days ago (#12426) * #12013 source GA to Beta: always sync data from two days ago * #12013 GA to Beta: fix changelog * #12013 source GA to Beta: rm odd file * #12013 Source GA to Beta: comment out integration tests * #12013 expose isDataGolden field, assume missing field equals False * #12013 expose isDataGOlden flag: reword docs * auto-bump connector version Co-authored-by: Octavia Squidington III --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 10 +++--- 
.../source-google-analytics-v4/Dockerfile | 2 +- .../acceptance-test-config.yml | 10 +++--- .../integration_tests/expected_records.txt | 12 +++---- .../source_google_analytics_v4/source.py | 17 +++++----- ...records.json => response_golden_data.json} | 1 + ...lse.json => response_non_golden_data.json} | 1 - .../unit_tests/unit_test.py | 33 ++++++++++++------- .../sources/google-analytics-v4.md | 4 +++ 10 files changed, 55 insertions(+), 37 deletions(-) rename airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/{response_with_records.json => response_golden_data.json} (96%) rename airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/{response_is_data_golden_false.json => response_non_golden_data.json} (96%) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index a9b096d3782cfc..a4d19c49fe09dc 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -279,7 +279,7 @@ - name: Google Analytics sourceDefinitionId: eff3616a-f9c3-11eb-9a03-0242ac130003 dockerRepository: airbyte/source-google-analytics-v4 - dockerImageTag: 0.1.19 + dockerImageTag: 0.1.20 documentationUrl: https://docs.airbyte.io/integrations/sources/google-analytics-v4 icon: google-analytics.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index aafa7185f3cd2a..0012903832a0d0 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -2797,7 +2797,7 @@ oauthFlowOutputParameters: - - "access_token" - - "refresh_token" -- dockerImage: "airbyte/source-google-analytics-v4:0.1.19" +- dockerImage: "airbyte/source-google-analytics-v4:0.1.20" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/google-analytics-v4" connectionSpecification: @@ -2826,10 +2826,10 @@ - "2020-06-01" window_in_days: type: "integer" - title: "Window in days (Optional)" - description: "The amount of days each stream slice would consist of beginning\ - \ from start_date. Bigger the value - faster the fetch. (Min=1, as for\ - \ a Day; Max=364, as for a Year)." + title: "Data request window (Optional)" + description: "The amount of data batched by the number of days. The bigger\ + \ the value, the bigger the batch size and the lower the API requests\ + \ made. (Min=1, as for a Day; Max=364, as for a Year)." 
examples: - 30 - 60 diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/Dockerfile b/airbyte-integrations/connectors/source-google-analytics-v4/Dockerfile index fffcbe5b003c52..56f7b74e9cd3b8 100644 --- a/airbyte-integrations/connectors/source-google-analytics-v4/Dockerfile +++ b/airbyte-integrations/connectors/source-google-analytics-v4/Dockerfile @@ -12,5 +12,5 @@ COPY main.py ./ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.19 +LABEL io.airbyte.version=0.1.20 LABEL io.airbyte.name=airbyte/source-google-analytics-v4 diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/acceptance-test-config.yml b/airbyte-integrations/connectors/source-google-analytics-v4/acceptance-test-config.yml index a697c0fc0955f2..920381712869a0 100644 --- a/airbyte-integrations/connectors/source-google-analytics-v4/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-google-analytics-v4/acceptance-test-config.yml @@ -20,10 +20,12 @@ tests: empty_streams: [] expect_records: path: "integration_tests/expected_records.txt" - incremental: - - config_path: "secrets/service_config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - future_state_path: "integration_tests/abnormal_state.json" +# Since the connector makes 2 days look back window, it can not pass SAT where all records produce cursor value greater ao equal to a state value +# see https://github.com/airbytehq/airbyte/issues/12013 for details +# incremental: +# - config_path: "secrets/service_config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" full_refresh: - config_path: "secrets/service_config.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-google-analytics-v4/integration_tests/expected_records.txt index 3701178cfbe623..0a2dd91721f28f 100644 --- a/airbyte-integrations/connectors/source-google-analytics-v4/integration_tests/expected_records.txt +++ b/airbyte-integrations/connectors/source-google-analytics-v4/integration_tests/expected_records.txt @@ -1,6 +1,6 @@ -{"stream": "new_users_per_day", "data": {"ga_date": "2021-12-10", "ga_country": "United States", "ga_region": "Washington", "ga_newUsers": 1, "view_id": "211669975"}, "emitted_at": 1639563255199} -{"stream": "devices", "data": {"ga_date": "2021-12-10", "ga_deviceCategory": "desktop", "ga_operatingSystem": "Macintosh", "ga_browser": "Firefox", "ga_users": 1, "ga_newUsers": 1, "ga_sessions": 1, "ga_sessionsPerUser": 1.0, "ga_avgSessionDuration": 0.0, "ga_pageviews": 1, "ga_pageviewsPerSession": 1.0, "ga_avgTimeOnPage": 0.0, "ga_bounceRate": 100.0, "ga_exitRate": 100.0, "view_id": "211669975"}, "emitted_at": 1639563253285} -{"stream": "daily_active_users", "data": {"ga_date": "2021-12-10", "ga_1dayUsers": 1, "view_id": "211669975"}, "emitted_at": 1639563251092} -{"stream": "weekly_active_users", "data": {"ga_date": "2021-12-15", "ga_7dayUsers": 1, "view_id": "211669975"}, "emitted_at": 1639563249172} -{"stream": "locations", "data": {"ga_date": "2021-12-10", "ga_continent": "Americas", "ga_subContinent": "Northern America", "ga_country": "United States", "ga_region": "Washington", "ga_metro": "Seattle-Tacoma WA", "ga_city": "Seattle", "ga_users": 
1, "ga_newUsers": 1, "ga_sessions": 1, "ga_sessionsPerUser": 1.0, "ga_avgSessionDuration": 0.0, "ga_pageviews": 1, "ga_pageviewsPerSession": 1.0, "ga_avgTimeOnPage": 0.0, "ga_bounceRate": 100.0, "ga_exitRate": 100.0, "view_id": "211669975"}, "emitted_at": 1639563230934} -{"stream": "pages", "data": {"ga_date": "2021-12-10", "ga_hostname": "www.surveymonkey.com", "ga_pagePath": "/apps/NKI5TOTqk4tS5BZyJXU9YQ_3D_3D/preview", "ga_pageviews": 1, "ga_uniquePageviews": 1, "ga_avgTimeOnPage": 0.0, "ga_entrances": 1, "ga_entranceRate": 100.0, "ga_bounceRate": 100.0, "ga_exits": 1, "ga_exitRate": 100.0, "view_id": "211669975"}, "emitted_at": 1639563227527} \ No newline at end of file +{"stream": "new_users_per_day", "data": {"ga_date": "2021-12-10", "ga_country": "United States", "ga_region": "Washington", "ga_newUsers": 1, "view_id": "211669975", "isDataGolden": true}, "emitted_at": 1639563255199} +{"stream": "devices", "data": {"ga_date": "2021-12-10", "ga_deviceCategory": "desktop", "ga_operatingSystem": "Macintosh", "ga_browser": "Firefox", "ga_users": 1, "ga_newUsers": 1, "ga_sessions": 1, "ga_sessionsPerUser": 1.0, "ga_avgSessionDuration": 0.0, "ga_pageviews": 1, "ga_pageviewsPerSession": 1.0, "ga_avgTimeOnPage": 0.0, "ga_bounceRate": 100.0, "ga_exitRate": 100.0, "view_id": "211669975", "isDataGolden": true}, "emitted_at": 1639563253285} +{"stream": "daily_active_users", "data": {"ga_date": "2021-12-10", "ga_1dayUsers": 1, "view_id": "211669975", "isDataGolden": true}, "emitted_at": 1639563251092} +{"stream": "weekly_active_users", "data": {"ga_date": "2021-12-15", "ga_7dayUsers": 1, "view_id": "211669975", "isDataGolden": true}, "emitted_at": 1639563249172} +{"stream": "locations", "data": {"ga_date": "2021-12-10", "ga_continent": "Americas", "ga_subContinent": "Northern America", "ga_country": "United States", "ga_region": "Washington", "ga_metro": "Seattle-Tacoma WA", "ga_city": "Seattle", "ga_users": 1, "ga_newUsers": 1, "ga_sessions": 1, "ga_sessionsPerUser": 1.0, "ga_avgSessionDuration": 0.0, "ga_pageviews": 1, "ga_pageviewsPerSession": 1.0, "ga_avgTimeOnPage": 0.0, "ga_bounceRate": 100.0, "ga_exitRate": 100.0, "view_id": "211669975", "isDataGolden": true}, "emitted_at": 1639563230934} +{"stream": "pages", "data": {"ga_date": "2021-12-10", "ga_hostname": "www.surveymonkey.com", "ga_pagePath": "/apps/NKI5TOTqk4tS5BZyJXU9YQ_3D_3D/preview", "ga_pageviews": 1, "ga_uniquePageviews": 1, "ga_avgTimeOnPage": 0.0, "ga_entrances": 1, "ga_entranceRate": 100.0, "ga_bounceRate": 100.0, "ga_exits": 1, "ga_exitRate": 100.0, "view_id": "211669975", "isDataGolden": true}, "emitted_at": 1639563227527} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/source_google_analytics_v4/source.py b/airbyte-integrations/connectors/source-google-analytics-v4/source_google_analytics_v4/source.py index b6fcc161d5f2bb..70a0f5cdc0de24 100644 --- a/airbyte-integrations/connectors/source-google-analytics-v4/source_google_analytics_v4/source.py +++ b/airbyte-integrations/connectors/source-google-analytics-v4/source_google_analytics_v4/source.py @@ -209,7 +209,7 @@ def get_json_schema(self) -> Mapping[str, Any]: if data_format: metric_data["format"] = data_format schema["properties"][metric] = metric_data - + schema["properties"]["isDataGolden"] = {"type": "boolean"} return schema def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs: Any) -> Iterable[Optional[Mapping[str, Any]]]: @@ -226,14 +226,15 @@ def stream_slices(self, stream_state: Mapping[str, Any] 
= None, **kwargs: Any) - ...] """ - today = pendulum.now().date() + end_date = pendulum.now().date() start_date = pendulum.parse(self.start_date).date() if stream_state: prev_end_date = pendulum.parse(stream_state.get(self.cursor_field)).date() - start_date = prev_end_date.add(days=1) - end_date = today - if start_date > end_date: - return [None] + start_date = prev_end_date.add(days=1) # do not include previous `end_date` + # always resync 2 previous days to be sure data is golden + # https://support.google.com/analytics/answer/1070983?hl=en#DataProcessingLatency&zippy=%2Cin-this-article + # https://github.com/airbytehq/airbyte/issues/12013#issuecomment-1111255503 + start_date = start_date.subtract(days=2) date_slices = [] slice_start_date = start_date @@ -403,11 +404,11 @@ def parse_response(self, response: requests.Response, **kwargs: Any) -> Iterable record[metric_name.replace("ga:", "ga_")] = value record["view_id"] = self.view_id - + record["isDataGolden"] = report.get("data", {}).get("isDataGolden", False) yield record def check_for_sampled_result(self, data: Mapping) -> None: - if not data.get("isDataGolden", True): + if not data.get("isDataGolden", False): self.logger.warning(DATA_IS_NOT_GOLDEN_MSG) if data.get("samplesReadCounts", False): self.logger.warning(RESULT_IS_SAMPLED_MSG) diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/response_with_records.json b/airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/response_golden_data.json similarity index 96% rename from airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/response_with_records.json rename to airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/response_golden_data.json index be89bd58587622..38b5a1af404ce7 100644 --- a/airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/response_with_records.json +++ b/airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/response_golden_data.json @@ -23,6 +23,7 @@ ] } ], + "isDataGolden": true, "totals": [ { "values": ["158"] diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/response_is_data_golden_false.json b/airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/response_non_golden_data.json similarity index 96% rename from airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/response_is_data_golden_false.json rename to airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/response_non_golden_data.json index ff7e3d23ad2385..486c27180ec372 100644 --- a/airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/response_is_data_golden_false.json +++ b/airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/response_non_golden_data.json @@ -23,7 +23,6 @@ ] } ], - "isDataGolden": false, "totals": [ { "values": ["158"] diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/unit_test.py index b95ed88cbdf334..f5134467f412c9 100644 --- a/airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/unit_test.py +++ b/airbyte-integrations/connectors/source-google-analytics-v4/unit_tests/unit_test.py @@ -11,7 +11,7 @@ import pendulum import pytest -from airbyte_cdk.models import ConfiguredAirbyteCatalog, SyncMode +from airbyte_cdk.models import ConfiguredAirbyteCatalog, SyncMode, Type from airbyte_cdk.sources.streams.http.auth import NoAuth from 
freezegun import freeze_time from source_google_analytics_v4.source import ( @@ -81,10 +81,10 @@ def mock_api_returns_no_records(requests_mock): @pytest.fixture def mock_api_returns_valid_records(requests_mock): """API returns valid data for given date based slice""" - yield requests_mock.post( - "https://analyticsreporting.googleapis.com/v4/reports:batchGet", - json=json.loads(read_file("response_with_records.json")), - ) + response = json.loads(read_file("response_golden_data.json")) + for report in response["reports"]: + assert report["data"]["isDataGolden"] is True + yield requests_mock.post("https://analyticsreporting.googleapis.com/v4/reports:batchGet", json=response) @pytest.fixture @@ -99,10 +99,10 @@ def mock_api_returns_sampled_results(requests_mock): @pytest.fixture def mock_api_returns_is_data_golden_false(requests_mock): """API returns valid data for given date based slice""" - yield requests_mock.post( - "https://analyticsreporting.googleapis.com/v4/reports:batchGet", - json=json.loads(read_file("response_is_data_golden_false.json")), - ) + response = json.loads(read_file("response_non_golden_data.json")) + for report in response["reports"]: + assert "isDataGolden" not in report["data"] + yield requests_mock.post("https://analyticsreporting.googleapis.com/v4/reports:batchGet", json=response) @pytest.fixture @@ -310,13 +310,15 @@ def test_unknown_metrics_or_dimensions_error_validation(mock_metrics_dimensions_ @freeze_time("2021-11-30") -def test_stream_slices_limited_by_current_date(test_config, mock_metrics_dimensions_type_list_link): +def test_stream_slice_limits(test_config, mock_metrics_dimensions_type_list_link): test_config["window_in_days"] = 14 g = GoogleAnalyticsV4IncrementalObjectsBase(config=test_config) stream_state = {"ga_date": "2021-11-25"} slices = g.stream_slices(stream_state=stream_state) current_date = pendulum.now().date().strftime("%Y-%m-%d") - assert slices == [{"startDate": "2021-11-26", "endDate": current_date}] + expected_start_date = "2021-11-24" # always resync two days back + expected_end_date = current_date # do not try to sync future dates + assert slices == [{"startDate": expected_start_date, "endDate": expected_end_date}] @freeze_time("2021-11-30") @@ -370,3 +372,12 @@ def test_connection_fail_due_to_http_status( assert "Please check the permissions for the requested view_id" in error assert test_config["view_id"] in error assert json_resp["error"] in error + + +def test_is_data_golden_flag_missing_equals_false( + mock_api_returns_is_data_golden_false, test_config, configured_catalog, mock_metrics_dimensions_type_list_link, mock_auth_call +): + source = SourceGoogleAnalyticsV4() + for message in source.read(logging.getLogger(), test_config, configured_catalog): + if message.type == Type.RECORD: + assert message.record.data["isDataGolden"] is False diff --git a/docs/integrations/sources/google-analytics-v4.md b/docs/integrations/sources/google-analytics-v4.md index 7b57aabc8483e2..e62dd233d11067 100644 --- a/docs/integrations/sources/google-analytics-v4.md +++ b/docs/integrations/sources/google-analytics-v4.md @@ -97,6 +97,9 @@ When sampling occurs, a warning is logged to the sync log. ## IsDataGolden Google Analytics API may return provisional or incomplete data. When this occurs, the returned data will set the flag `isDataGolden` to false, and the connector will log a warning to the sync log. +The connector adds a lookback window of 2 days to ensure any previously synced non-golden data is re-synced with its potential updates. 
This is done because [Google Analytics takes up to 48 hours](https://support.google.com/analytics/answer/1070983?hl=en#DataProcessingLatency&zippy=%2Cin-this-article) to update the data. For example:
+- If your last sync occurred 5 days ago and a sync kicks off today, it will attempt to sync data from 7 days ago up to the latest data available.
+To determine whether data has finished processing, use the exposed `isDataGolden` flag.
 
 ## Reading Custom Reports
 
@@ -159,6 +162,7 @@ Incremental sync is supported only if you add `ga:date` dimension to your custom
 
 | Version | Date       | Pull Request                                             | Subject                                                                                      |
 |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------------------------|
+| 0.1.20  | 2022-04-28 | [12426](https://github.com/airbytehq/airbyte/pull/12426) | Expose `isDataGolden` field and always resync data two days back to make sure it is golden  |
 | 0.1.19  | 2022-04-19 | [12150](https://github.com/airbytehq/airbyte/pull/12150) | Minor changes to documentation                                                               |
 | 0.1.18  | 2022-04-07 | [11803](https://github.com/airbytehq/airbyte/pull/11803) | Improved documentation                                                                       |
 | 0.1.17  | 2022-03-31 | [11512](https://github.com/airbytehq/airbyte/pull/11512) | Improved Unit and Acceptance tests coverage, fixed `read` with abnormally large state values |

From b22ae4f161e22dd9c9dd4ffa310c5fe5a64e06d6 Mon Sep 17 00:00:00 2001
From: Octavia Squidington III <90398440+octavia-squidington-iii@users.noreply.github.com>
Date: Tue, 3 May 2022 12:08:36 -0300
Subject: [PATCH 063/152] Bump Airbyte version from 0.36.6-alpha to 0.36.7-alpha (#12535)

Co-authored-by: davinchia
---
 .bumpversion.cfg                                   |  2 +-
 .env                                               |  2 +-
 airbyte-bootloader/Dockerfile                      |  2 +-
 airbyte-container-orchestrator/Dockerfile          |  2 +-
 airbyte-metrics/reporter/Dockerfile                |  2 +-
 airbyte-scheduler/app/Dockerfile                   |  2 +-
 airbyte-server/Dockerfile                          |  2 +-
 airbyte-webapp/package-lock.json                   |  4 ++--
 airbyte-webapp/package.json                        |  2 +-
 airbyte-workers/Dockerfile                         |  2 +-
 charts/airbyte/Chart.yaml                          |  2 +-
 charts/airbyte/README.md                           | 10 +++++-----
 charts/airbyte/values.yaml                         | 10 +++++-----
 docs/operator-guides/upgrading-airbyte.md          |  2 +-
 kube/overlays/stable-with-resource-limits/.env     |  2 +-
 .../stable-with-resource-limits/kustomization.yaml | 12 ++++++------
 kube/overlays/stable/.env                          |  2 +-
 kube/overlays/stable/kustomization.yaml            | 12 ++++++------
 octavia-cli/Dockerfile                             |  2 +-
 octavia-cli/README.md                              |  2 +-
 octavia-cli/install.sh                             |  2 +-
 octavia-cli/setup.py                               |  2 +-
 22 files changed, 41 insertions(+), 41 deletions(-)

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 7cbf98e4587d74..6021be6ddb4eb6 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.36.6-alpha
+current_version = 0.36.7-alpha
 commit = False
 tag = False
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-[a-z]+)?
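The slicing rule that PATCH 062 above adds to `stream_slices` is easier to follow outside the diff. The sketch below is illustrative, not the connector's code: `make_slices` is a hypothetical helper using plain `datetime` instead of the connector's `pendulum`, but it applies the same rule as the diff, namely resume one day after the saved cursor, always step back two days so not-yet-golden data is re-read, and emit window-sized date slices up to today.

```python
from datetime import date, timedelta
from typing import Dict, List, Optional


def make_slices(cursor: Optional[date], start_date: date, window_in_days: int = 1) -> List[Dict[str, str]]:
    """Illustrative version of the date slicing shown in source.py above."""
    end_date = date.today()
    if cursor is not None:
        # Resume one day after the previous end date (do not re-read it)...
        start_date = cursor + timedelta(days=1)
        # ...then always step back two days, because Google Analytics can take
        # up to 48 hours to finalize ("golden") the most recent data.
        start_date -= timedelta(days=2)
    slices = []
    while start_date <= end_date:
        slice_end = min(start_date + timedelta(days=window_in_days - 1), end_date)
        slices.append({"startDate": start_date.isoformat(), "endDate": slice_end.isoformat()})
        start_date = slice_end + timedelta(days=1)
    return slices
```

With the state from `test_stream_slice_limits` above (cursor 2021-11-25, a 14-day window, today frozen at 2021-11-30), this yields a single slice from 2021-11-24 to 2021-11-30, matching the expected values in that test.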
diff --git a/.env b/.env index 8155983831749a..08649fc84d4cb8 100644 --- a/.env +++ b/.env @@ -10,7 +10,7 @@ ### SHARED ### -VERSION=0.36.6-alpha +VERSION=0.36.7-alpha # When using the airbyte-db via default docker image CONFIG_ROOT=/data diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index 74e2c9d0c95932..21a32db18ff660 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim -ARG VERSION=0.36.6-alpha +ARG VERSION=0.36.7-alpha ENV APPLICATION airbyte-bootloader ENV VERSION ${VERSION} diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index 4eb292021a6bdf..0a6ae62c8ee165 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -25,7 +25,7 @@ RUN curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packa RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list RUN apt-get update && apt-get install -y kubectl -ARG VERSION=0.36.6-alpha +ARG VERSION=0.36.7-alpha ENV APPLICATION airbyte-container-orchestrator ENV VERSION=${VERSION} diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index 25890b1690b434..6a911df6f8b7bf 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS metrics-reporter -ARG VERSION=0.36.6-alpha +ARG VERSION=0.36.7-alpha ENV APPLICATION airbyte-metrics-reporter ENV VERSION ${VERSION} diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile index b841e267f1ed10..462d52d542d2b6 100644 --- a/airbyte-scheduler/app/Dockerfile +++ b/airbyte-scheduler/app/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS scheduler -ARG VERSION=0.36.6-alpha +ARG VERSION=0.36.7-alpha ENV APPLICATION airbyte-scheduler ENV VERSION ${VERSION} diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index 6f514552ec5aa4..70f075dfa628fc 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -3,7 +3,7 @@ FROM openjdk:${JDK_VERSION}-slim AS server EXPOSE 8000 -ARG VERSION=0.36.6-alpha +ARG VERSION=0.36.7-alpha ENV APPLICATION airbyte-server ENV VERSION ${VERSION} diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index 3625e37351bde8..f8f4993df68c39 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.36.6-alpha", + "version": "0.36.7-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.36.6-alpha", + "version": "0.36.7-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^6.1.1", "@fortawesome/free-brands-svg-icons": "^6.1.1", diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index 317d3635b31b4a..b5a11695998e50 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.36.6-alpha", + "version": "0.36.7-alpha", "private": true, "engines": { "node": ">=16.0.0" diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile index efe47ace83bc6d..28c4cb9cf4f4cf 100644 --- a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -25,7 
+25,7 @@ RUN curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packa RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list RUN apt-get update && apt-get install -y kubectl -ARG VERSION=0.36.6-alpha +ARG VERSION=0.36.7-alpha ENV APPLICATION airbyte-workers ENV VERSION ${VERSION} diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index ccbd47b7fcbabd..b3b297b6d07408 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -21,7 +21,7 @@ version: 0.3.2 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "0.36.6-alpha" +appVersion: "0.36.7-alpha" dependencies: - name: common diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index 7dd042a6f06bcb..5a09157e6e6cc1 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -30,7 +30,7 @@ Helm charts for Airbyte. | `webapp.replicaCount` | Number of webapp replicas | `1` | | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` | | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` | -| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.36.6-alpha` | +| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.36.7-alpha` | | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` | | `webapp.containerSecurityContext` | Security context for the container | `{}` | | `webapp.livenessProbe.enabled` | Enable livenessProbe on the webapp | `true` | @@ -72,7 +72,7 @@ Helm charts for Airbyte. | `scheduler.replicaCount` | Number of scheduler replicas | `1` | | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` | | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | -| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.36.6-alpha` | +| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.36.7-alpha` | | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | | `scheduler.containerSecurityContext` | Security context for the container | `{}` | | `scheduler.livenessProbe.enabled` | Enable livenessProbe on the scheduler | `true` | @@ -135,7 +135,7 @@ Helm charts for Airbyte. | `server.replicaCount` | Number of server replicas | `1` | | `server.image.repository` | The repository to use for the airbyte server image. | `airbyte/server` | | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` | -| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.36.6-alpha` | +| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.36.7-alpha` | | `server.podAnnotations` | Add extra annotations to the server pod | `{}` | | `server.containerSecurityContext` | Security context for the container | `{}` | | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` | @@ -170,7 +170,7 @@ Helm charts for Airbyte. 
| `worker.replicaCount` | Number of worker replicas | `1` | | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` | | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` | -| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.36.6-alpha` | +| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.36.7-alpha` | | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` | | `worker.containerSecurityContext` | Security context for the container | `{}` | | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` | @@ -202,7 +202,7 @@ Helm charts for Airbyte. | ----------------------------- | -------------------------------------------------------------------- | -------------------- | | `bootloader.image.repository` | The repository to use for the airbyte bootloader image. | `airbyte/bootloader` | | `bootloader.image.pullPolicy` | the pull policy to use for the airbyte bootloader image | `IfNotPresent` | -| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.36.6-alpha` | +| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.36.7-alpha` | | `bootloader.podAnnotations` | Add extra annotations to the bootloader pod | `{}` | | `bootloader.nodeSelector` | Node labels for pod assignment | `{}` | | `bootloader.tolerations` | Tolerations for worker pod assignment. | `[]` | diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index 77826f4fba7138..9a480a3e40a950 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -41,7 +41,7 @@ webapp: image: repository: airbyte/webapp pullPolicy: IfNotPresent - tag: 0.36.6-alpha + tag: 0.36.7-alpha ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s) ## @@ -207,7 +207,7 @@ scheduler: image: repository: airbyte/scheduler pullPolicy: IfNotPresent - tag: 0.36.6-alpha + tag: 0.36.7-alpha ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod ## @@ -438,7 +438,7 @@ server: image: repository: airbyte/server pullPolicy: IfNotPresent - tag: 0.36.6-alpha + tag: 0.36.7-alpha ## @param server.podAnnotations [object] Add extra annotations to the server pod ## @@ -565,7 +565,7 @@ worker: image: repository: airbyte/worker pullPolicy: IfNotPresent - tag: 0.36.6-alpha + tag: 0.36.7-alpha ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s) ## @@ -683,7 +683,7 @@ bootloader: image: repository: airbyte/bootloader pullPolicy: IfNotPresent - tag: 0.36.6-alpha + tag: 0.36.7-alpha ## @param bootloader.podAnnotations [object] Add extra annotations to the bootloader pod ## diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md index acd9582b16dc1b..796676d9edf863 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/operator-guides/upgrading-airbyte.md @@ -103,7 +103,7 @@ If you are upgrading from (i.e. your current version of Airbyte is) Airbyte vers Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`. 
```bash - docker run --rm -v /tmp:/config airbyte/migration:0.36.6-alpha --\ + docker run --rm -v /tmp:/config airbyte/migration:0.36.7-alpha --\ --input /config/airbyte_archive.tar.gz\ --output /config/airbyte_archive_migrated.tar.gz ``` diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env index 1b6354b471841b..7030d2653dbb16 100644 --- a/kube/overlays/stable-with-resource-limits/.env +++ b/kube/overlays/stable-with-resource-limits/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.36.6-alpha +AIRBYTE_VERSION=0.36.7-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml index 7e2fca766fe400..32d99c5171d786 100644 --- a/kube/overlays/stable-with-resource-limits/kustomization.yaml +++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.36.6-alpha + newTag: 0.36.7-alpha - name: airbyte/bootloader - newTag: 0.36.6-alpha + newTag: 0.36.7-alpha - name: airbyte/scheduler - newTag: 0.36.6-alpha + newTag: 0.36.7-alpha - name: airbyte/server - newTag: 0.36.6-alpha + newTag: 0.36.7-alpha - name: airbyte/webapp - newTag: 0.36.6-alpha + newTag: 0.36.7-alpha - name: airbyte/worker - newTag: 0.36.6-alpha + newTag: 0.36.7-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env index a3fe3a2a989146..621fa75ccb97c7 100644 --- a/kube/overlays/stable/.env +++ b/kube/overlays/stable/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.36.6-alpha +AIRBYTE_VERSION=0.36.7-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml index d68e6d49ba26f4..cd8a10a3ac1d57 100644 --- a/kube/overlays/stable/kustomization.yaml +++ b/kube/overlays/stable/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.36.6-alpha + newTag: 0.36.7-alpha - name: airbyte/bootloader - newTag: 0.36.6-alpha + newTag: 0.36.7-alpha - name: airbyte/scheduler - newTag: 0.36.6-alpha + newTag: 0.36.7-alpha - name: airbyte/server - newTag: 0.36.6-alpha + newTag: 0.36.7-alpha - name: airbyte/webapp - newTag: 0.36.6-alpha + newTag: 0.36.7-alpha - name: airbyte/worker - newTag: 0.36.6-alpha + newTag: 0.36.7-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/octavia-cli/Dockerfile b/octavia-cli/Dockerfile index 75899e30eeebb5..2c52dd73780c1b 100644 --- a/octavia-cli/Dockerfile +++ b/octavia-cli/Dockerfile @@ -14,5 +14,5 @@ USER octavia-cli WORKDIR /home/octavia-project ENTRYPOINT ["octavia"] -LABEL io.airbyte.version=0.36.6-alpha +LABEL io.airbyte.version=0.36.7-alpha LABEL io.airbyte.name=airbyte/octavia-cli diff --git a/octavia-cli/README.md b/octavia-cli/README.md index 491a7025942ac6..9a8794ef510fbf 100644 --- a/octavia-cli/README.md +++ b/octavia-cli/README.md @@ -105,7 +105,7 @@ This script: ```bash touch ~/.octavia # Create a file to store env variables that will be mapped the octavia-cli container mkdir my_octavia_project_directory # Create your octavia project directory where YAML configurations will be stored. 
-docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.36.6-alpha +docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.36.7-alpha ``` ### Using `docker-compose` diff --git a/octavia-cli/install.sh b/octavia-cli/install.sh index 66da8b26d01e19..8d5c215422efb7 100755 --- a/octavia-cli/install.sh +++ b/octavia-cli/install.sh @@ -3,7 +3,7 @@ # This install scripts currently only works for ZSH and Bash profiles. # It creates an octavia alias in your profile bound to a docker run command and your current user. -VERSION=0.36.6-alpha +VERSION=0.36.7-alpha OCTAVIA_ENV_FILE=${HOME}/.octavia detect_profile() { diff --git a/octavia-cli/setup.py b/octavia-cli/setup.py index dcb27719e6b6c3..ccef25a59b6324 100644 --- a/octavia-cli/setup.py +++ b/octavia-cli/setup.py @@ -15,7 +15,7 @@ setup( name="octavia-cli", - version="0.36.6", + version="0.36.7", description="A command line interface to manage Airbyte configurations", long_description=README, author="Airbyte", From 17892e82b86f143f0bcf93708eeb228af238e42e Mon Sep 17 00:00:00 2001 From: Davin Chia Date: Tue, 3 May 2022 23:11:08 +0800 Subject: [PATCH 064/152] Refactor process naming logic. (#12533) Follow up to #12503. Combine the naming logic to avoid duplication as they are mostly similar. Since Kubernetes has stricter conventions, we use the stricter convention throughout. Move the naming function to DockerProcessFactory. --- .../workers/process/DockerProcessFactory.java | 26 +--------- .../workers/process/KubeProcessFactory.java | 49 ++----------------- .../workers/process/ProcessFactory.java | 42 ++++++++++++++++ ...ctoryTest.java => ProcessFactoryTest.java} | 19 ++++--- 4 files changed, 60 insertions(+), 76 deletions(-) rename airbyte-workers/src/test/java/io/airbyte/workers/process/{KubeProcessFactoryTest.java => ProcessFactoryTest.java} (65%) diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java b/airbyte-workers/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java index 61c33bd6db3810..9dae81f6bf0f5f 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java @@ -23,15 +23,13 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; -import org.apache.commons.lang3.RandomStringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class DockerProcessFactory implements ProcessFactory { private static final Logger LOGGER = LoggerFactory.getLogger(DockerProcessFactory.class); - private static final String VERSION_DELIMITER = ":"; - private static final String DOCKER_DELIMITER = "/"; + private static final int DOCKER_NAME_LEN_LIMIT = 128; private static final Path DATA_MOUNT_DESTINATION = Path.of("/data"); private static final Path LOCAL_MOUNT_DESTINATION = Path.of("/local"); @@ -117,7 +115,7 @@ public Process create(final String jobId, rebasePath(jobRoot).toString(), // rebases the job root on the job data mount "--log-driver", "none"); - final String containerName = createContainerName(imageName, jobId, attempt); + final String containerName = ProcessFactory.createProcessName(imageName, jobId, attempt, DOCKER_NAME_LEN_LIMIT); cmd.add("--name"); cmd.add(containerName); @@ -169,26 
+167,6 @@ public Process create(final String jobId, } } - private static String createContainerName(final String fullImagePath, final String jobId, final int attempt) { - final var noVersion = fullImagePath.split(VERSION_DELIMITER)[0]; - - final var nameParts = noVersion.split(DOCKER_DELIMITER); - var imageName = nameParts[nameParts.length - 1]; - - final var randSuffix = RandomStringUtils.randomAlphabetic(5).toLowerCase(); - final String suffix = "sync" + "-" + jobId + "-" + attempt + "-" + randSuffix; - - var podName = imageName + "-" + suffix; - final var podNameLenLimit = 128; - if (podName.length() > podNameLenLimit) { - final var extra = podName.length() - podNameLenLimit; - imageName = imageName.substring(extra); - podName = imageName + "-" + suffix; - } - - return podName; - } - private Path rebasePath(final Path jobRoot) { final Path relativePath = workspaceRoot.relativize(jobRoot); return DATA_MOUNT_DESTINATION.resolve(relativePath); diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/process/KubeProcessFactory.java b/airbyte-workers/src/main/java/io/airbyte/workers/process/KubeProcessFactory.java index 39e46f6b6afd44..c2487da892a267 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/process/KubeProcessFactory.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/process/KubeProcessFactory.java @@ -15,14 +15,14 @@ import java.nio.file.Path; import java.util.HashMap; import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import org.apache.commons.lang3.RandomStringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class KubeProcessFactory implements ProcessFactory { + @VisibleForTesting + public static final int KUBE_NAME_LEN_LIMIT = 63; + private static final Logger LOGGER = LoggerFactory.getLogger(KubeProcessFactory.class); public static final String JOB_TYPE = "job_type"; @@ -31,15 +31,12 @@ public class KubeProcessFactory implements ProcessFactory { public static final String CHECK_JOB = "check"; public static final String DISCOVER_JOB = "discover"; - public static final String SYNC_RUNNER = "sync-runner"; - public static final String SYNC_STEP = "sync_step"; public static final String READ_STEP = "read"; public static final String WRITE_STEP = "write"; public static final String NORMALISE_STEP = "normalise"; public static final String CUSTOM_STEP = "custom"; - private static final Pattern ALPHABETIC = Pattern.compile("[a-zA-Z]+"); private static final String JOB_LABEL_KEY = "job_id"; private static final String ATTEMPT_LABEL_KEY = "attempt_id"; private static final String WORKER_POD_LABEL_KEY = "airbyte"; @@ -109,7 +106,7 @@ public Process create(final String jobId, throws WorkerException { try { // used to differentiate source and destination processes with the same id and attempt - final String podName = createPodName(imageName, jobId, attempt); + final String podName = ProcessFactory.createProcessName(imageName, jobId, attempt, KUBE_NAME_LEN_LIMIT); LOGGER.info("Attempting to start pod = {} for {}", podName, imageName); final int stdoutLocalPort = KubePortManagerSingleton.getInstance().take(); @@ -165,42 +162,4 @@ public static Map getLabels(final String jobId, final int attemp return allLabels; } - /** - * Docker image names are by convention separated by slashes. The last portion is the image's name. - * This is followed by a colon and a version number. e.g. airbyte/scheduler:v1 or - * gcr.io/my-project/image-name:v2. 
- * - * Kubernetes has a maximum pod name length of 63 characters, and names must start with an - * alphabetic character. - * - * With these two facts, attempt to construct a unique Pod name with the image name present for - * easier operations. - */ - @VisibleForTesting - protected static String createPodName(final String fullImagePath, final String jobId, final int attempt) { - final var versionDelimiter = ":"; - final var noVersion = fullImagePath.split(versionDelimiter)[0]; - - final var dockerDelimiter = "/"; - final var nameParts = noVersion.split(dockerDelimiter); - var imageName = nameParts[nameParts.length - 1]; - - final var randSuffix = RandomStringUtils.randomAlphabetic(5).toLowerCase(); - final String suffix = "sync" + "-" + jobId + "-" + attempt + "-" + randSuffix; - - var podName = imageName + "-" + suffix; - final var podNameLenLimit = 63; - if (podName.length() > podNameLenLimit) { - final var extra = podName.length() - podNameLenLimit; - imageName = imageName.substring(extra); - podName = imageName + "-" + suffix; - } - final Matcher m = ALPHABETIC.matcher(podName); - // Since we add sync-UUID as a suffix a couple of lines up, there will always be a substring - // starting with an alphabetic character. - // If the image name is a no-op, this function should always return `sync-UUID` at the minimum. - m.find(); - return podName.substring(m.start()); - } - } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/process/ProcessFactory.java b/airbyte-workers/src/main/java/io/airbyte/workers/process/ProcessFactory.java index edd1e59508c88a..724763fcb9ffa4 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/process/ProcessFactory.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/process/ProcessFactory.java @@ -8,9 +8,16 @@ import io.airbyte.workers.WorkerException; import java.nio.file.Path; import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import org.apache.commons.lang3.RandomStringUtils; public interface ProcessFactory { + String VERSION_DELIMITER = ":"; + String DOCKER_DELIMITER = "/"; + Pattern ALPHABETIC = Pattern.compile("[a-zA-Z]+"); + /** * Creates a ProcessBuilder to run a program in a new Process. * @@ -44,4 +51,39 @@ Process create(String jobId, final String... args) throws WorkerException; + /** + * Docker image names are by convention separated by slashes. The last portion is the image's name. + * This is followed by a colon and a version number. e.g. airbyte/scheduler:v1 or + * gcr.io/my-project/image-name:v2. + * + * With these two facts, attempt to construct a unique process name with the image name present that + * can be used by the factories implementing this interface for easier operations. + */ + static String createProcessName(final String fullImagePath, final String jobId, final int attempt, final int lenLimit) { + final var noVersion = fullImagePath.split(VERSION_DELIMITER)[0]; + + final var nameParts = noVersion.split(DOCKER_DELIMITER); + var imageName = nameParts[nameParts.length - 1]; + + final var randSuffix = RandomStringUtils.randomAlphabetic(5).toLowerCase(); + final String suffix = "sync" + "-" + jobId + "-" + attempt + "-" + randSuffix; + + var processName = imageName + "-" + suffix; + if (processName.length() > lenLimit) { + final var extra = processName.length() - lenLimit; + imageName = imageName.substring(extra); + processName = imageName + "-" + suffix; + } + + // Kubernetes pod names must start with an alphabetic character while Docker names accept + // alphanumeric. 
+ // Use the stricter convention for simplicity. + final Matcher m = ALPHABETIC.matcher(processName); + // Since we add sync-UUID as a suffix a couple of lines up, there will always be a substring + // starting with an alphabetic character. + // If the image name is a no-op, this function should always return `sync-UUID` at the minimum. + m.find(); + return processName.substring(m.start()); + } + } diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/process/KubeProcessFactoryTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/process/ProcessFactoryTest.java similarity index 65% rename from airbyte-workers/src/test/java/io/airbyte/workers/process/KubeProcessFactoryTest.java rename to airbyte-workers/src/test/java/io/airbyte/workers/process/ProcessFactoryTest.java index 32690a9e27950b..7701c58fb7d1d4 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/process/KubeProcessFactoryTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/process/ProcessFactoryTest.java @@ -7,18 +7,21 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -public class KubeProcessFactoryTest { +public class ProcessFactoryTest { @Test void getPodNameNormal() { - final var name = KubeProcessFactory.createPodName("airbyte/tester:1", "1", 10); + final var name = ProcessFactory.createProcessName("airbyte/tester:1", "1", 10, + KubeProcessFactory.KUBE_NAME_LEN_LIMIT); final var withoutRandSuffix = name.substring(0, name.length() - 5); Assertions.assertEquals("tester-sync-1-10-", withoutRandSuffix); } @Test void getPodNameTruncated() { - final var name = KubeProcessFactory.createPodName("airbyte/very-very-very-long-name-longer-than-63-chars:2", "1", 10); + final var name = + ProcessFactory.createProcessName("airbyte/very-very-very-long-name-longer-than-63-chars:2", + "1", 10, KubeProcessFactory.KUBE_NAME_LEN_LIMIT); final var withoutRandSuffix = name.substring(0, name.length() - 5); Assertions.assertEquals("very-very-very-long-name-longer-than-63-chars-sync-1-10-", withoutRandSuffix); } @@ -26,7 +29,8 @@ void getPodNameTruncated() { @Test void testHandlingDashAsFirstCharacter() { final var uuid = "7339ba3b-cb53-4210-9591-c70d4a372330"; - final var name = KubeProcessFactory.createPodName("airbyte/source-google-adwordsv2:latest", uuid, 10); + final var name = ProcessFactory.createProcessName("airbyte/source-google-adwordsv2:latest", uuid, + 10, KubeProcessFactory.KUBE_NAME_LEN_LIMIT); final var withoutRandSuffix = name.substring(0, name.length() - 5); Assertions.assertEquals("le-adwordsv2-sync-7339ba3b-cb53-4210-9591-c70d4a372330-10-", withoutRandSuffix); @@ -35,7 +39,8 @@ void testHandlingDashAsFirstCharacter() { @Test void testOnlyDashes() { final var uuid = "7339ba3b-cb53-4210-9591-c70d4a372330"; - final var name = KubeProcessFactory.createPodName("--------", uuid, 10); + final var name = ProcessFactory.createProcessName("--------", uuid, 10, + KubeProcessFactory.KUBE_NAME_LEN_LIMIT); final var withoutRandSuffix = name.substring(0, name.length() - 5); Assertions.assertEquals("sync-7339ba3b-cb53-4210-9591-c70d4a372330-10-", withoutRandSuffix); @@ -44,9 +49,9 @@ void testOnlyDashes() { @Test void testOnlyNumeric() { final var uuid = "7339ba3b-cb53-4210-9591-c70d4a372330"; - final var name = KubeProcessFactory.createPodName("0000000000", uuid, 10); + final var name = ProcessFactory.createProcessName("0000000000", uuid, 10, + KubeProcessFactory.KUBE_NAME_LEN_LIMIT); - System.out.println(name); final var withoutRandSuffix = name.substring(0, name.length() - 5); 
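     // With an all-numeric image name there is no alphabetic character for the
     // ALPHABETIC matcher to anchor on, so createProcessName trims everything
     // before the generated "sync-<jobId>-<attempt>-<rand>" suffix: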
Assertions.assertEquals("sync-7339ba3b-cb53-4210-9591-c70d4a372330-10-", withoutRandSuffix); } From 7e79cacf77a54c3d58cc740ce59eccd91773dc97 Mon Sep 17 00:00:00 2001 From: Edmundo Ruiz Ghanem <168664+edmundito@users.noreply.github.com> Date: Tue, 3 May 2022 14:30:27 -0400 Subject: [PATCH 065/152] Fix new source / destination test connection success / failure tracking events (#12526) * Consolidate track new action analytics calls into hook * Move new source and destination test tracking to where the actual connection is being tested and remove from creation * Use consistent properties across all track new source actions * Make track action hook more generic and support new destination actions Update new destination actions with useTrackAction hook * Use connector_source_definition_id over connector_source_id for track new source actions --- .../src/core/analytics/AnalyticsService.ts | 2 +- .../src/hooks/services/useDestinationHook.tsx | 16 +----- .../src/hooks/services/useSourceHook.tsx | 19 ------- airbyte-webapp/src/hooks/useTrackAction.ts | 26 ++++++++++ .../components/DestinationForm.tsx | 9 ++-- .../components/DestinationStep.tsx | 7 ++- .../OnboardingPage/components/SourceStep.tsx | 9 ++-- .../components/SourceForm.tsx | 9 ++-- .../Connector/ConnectorCard/ConnectorCard.tsx | 51 ++++++++++++------- 9 files changed, 75 insertions(+), 73 deletions(-) create mode 100644 airbyte-webapp/src/hooks/useTrackAction.ts diff --git a/airbyte-webapp/src/core/analytics/AnalyticsService.ts b/airbyte-webapp/src/core/analytics/AnalyticsService.ts index ea0bf2a0af35c2..125a1887bb8942 100644 --- a/airbyte-webapp/src/core/analytics/AnalyticsService.ts +++ b/airbyte-webapp/src/core/analytics/AnalyticsService.ts @@ -11,7 +11,7 @@ export class AnalyticsService { reset = (): void => this.getSegmentAnalytics()?.reset?.(); - track = (name: string, properties: Record): void => + track =
<P extends Record<string, unknown> = Record<string, unknown>
>(name: string, properties: P): void => this.getSegmentAnalytics()?.track?.(name, { ...properties, ...this.context, diff --git a/airbyte-webapp/src/hooks/services/useDestinationHook.tsx b/airbyte-webapp/src/hooks/services/useDestinationHook.tsx index 8969c078bec825..8e01a6216938b1 100644 --- a/airbyte-webapp/src/hooks/services/useDestinationHook.tsx +++ b/airbyte-webapp/src/hooks/services/useDestinationHook.tsx @@ -64,8 +64,6 @@ const useCreateDestination = () => { const queryClient = useQueryClient(); const workspace = useCurrentWorkspace(); - const analyticsService = useAnalyticsService(); - return useMutation( async (createDestinationPayload: { values: ValuesProps; destinationConnector?: ConnectorProps }) => { const { values, destinationConnector } = createDestinationPayload; @@ -78,23 +76,11 @@ const useCreateDestination = () => { }); }, { - onSuccess: (data, ctx) => { - analyticsService.track("New Destination - Action", { - action: "Tested connector - success", - connector_destination: ctx.destinationConnector?.name, - connector_destination_definition_id: ctx.destinationConnector?.destinationDefinitionId, - }); + onSuccess: (data) => { queryClient.setQueryData(destinationsKeys.lists(), (lst: DestinationList | undefined) => ({ destinations: [data, ...(lst?.destinations ?? [])], })); }, - onError: (_, ctx) => { - analyticsService.track("New Destination - Action", { - action: "Tested connector - failure", - connector_destination: ctx.destinationConnector?.name, - connector_destination_definition_id: ctx.destinationConnector?.destinationDefinitionId, - }); - }, } ); }; diff --git a/airbyte-webapp/src/hooks/services/useSourceHook.tsx b/airbyte-webapp/src/hooks/services/useSourceHook.tsx index 8b51497e086102..cbde1807672207 100644 --- a/airbyte-webapp/src/hooks/services/useSourceHook.tsx +++ b/airbyte-webapp/src/hooks/services/useSourceHook.tsx @@ -68,17 +68,9 @@ const useCreateSource = () => { const queryClient = useQueryClient(); const workspace = useCurrentWorkspace(); - const analyticsService = useAnalyticsService(); - return useMutation( async (createSourcePayload: { values: ValuesProps; sourceConnector?: ConnectorProps }) => { const { values, sourceConnector } = createSourcePayload; - analyticsService.track("New Source - Action", { - action: "Test a connector", - connector_source: sourceConnector?.name, - connector_source_id: sourceConnector?.sourceDefinitionId, - }); - try { // Try to crete source const result = await service.create({ @@ -88,19 +80,8 @@ const useCreateSource = () => { connectionConfiguration: values.connectionConfiguration, }); - analyticsService.track("New Source - Action", { - action: "Tested connector - success", - connector_source: sourceConnector?.name, - connector_source_id: sourceConnector?.sourceDefinitionId, - }); - return result; } catch (e) { - analyticsService.track("New Source - Action", { - action: "Tested connector - failure", - connector_source: sourceConnector?.name, - connector_source_id: sourceConnector?.sourceDefinitionId, - }); throw e; } }, diff --git a/airbyte-webapp/src/hooks/useTrackAction.ts b/airbyte-webapp/src/hooks/useTrackAction.ts new file mode 100644 index 00000000000000..8fa44dc179fbe1 --- /dev/null +++ b/airbyte-webapp/src/hooks/useTrackAction.ts @@ -0,0 +1,26 @@ +import { useCallback } from "react"; + +import { useAnalyticsService } from "./services/Analytics/useAnalyticsService"; + +export const enum TrackActionType { + NEW_SOURCE = "New Source", + NEW_DESTINATION = "New Destination", +} + +interface TrackActionProperties { + 
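+  // Editor's annotation (not part of the original patch): these optional fields are the
+  // connector metadata attached to each tracked action; source events populate the
+  // connector_source pair, destination events the connector_destination pair.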
connector_source?: string; + connector_source_definition_id?: string; + connector_destination?: string; + connector_destination_definition_id?: string; +} + +export const useTrackAction = (type: TrackActionType) => { + const analyticsService = useAnalyticsService(); + + return useCallback( + (action: string, properties: TrackActionProperties) => { + analyticsService.track(`${type} - Action`, { action, ...properties }); + }, + [analyticsService, type] + ); +}; diff --git a/airbyte-webapp/src/pages/DestinationPage/pages/CreateDestinationPage/components/DestinationForm.tsx b/airbyte-webapp/src/pages/DestinationPage/pages/CreateDestinationPage/components/DestinationForm.tsx index 9d96a426c163f5..b6626906b375aa 100644 --- a/airbyte-webapp/src/pages/DestinationPage/pages/CreateDestinationPage/components/DestinationForm.tsx +++ b/airbyte-webapp/src/pages/DestinationPage/pages/CreateDestinationPage/components/DestinationForm.tsx @@ -4,8 +4,8 @@ import { FormattedMessage } from "react-intl"; import { ConnectionConfiguration } from "core/domain/connection"; import { DestinationDefinition } from "core/domain/connector"; import { LogsRequestError } from "core/request/LogsRequestError"; -import { useAnalyticsService } from "hooks/services/Analytics/useAnalyticsService"; import useRouter from "hooks/useRouter"; +import { TrackActionType, useTrackAction } from "hooks/useTrackAction"; import { useGetDestinationDefinitionSpecificationAsync } from "services/connector/DestinationDefinitionSpecificationService"; import { createFormErrorMessage } from "utils/errorStatusMessage"; import { ConnectorCard } from "views/Connector/ConnectorCard"; @@ -39,7 +39,7 @@ const DestinationForm: React.FC = ({ afterSelectConnector, }) => { const { location } = useRouter(); - const analyticsService = useAnalyticsService(); + const trackNewDestinationAction = useTrackAction(TrackActionType.NEW_DESTINATION); const [destinationDefinitionId, setDestinationDefinitionId] = useState( hasDestinationDefinitionId(location.state) ? 
location.state.destinationDefinitionId : null @@ -58,9 +58,8 @@ const DestinationForm: React.FC = ({ afterSelectConnector(); } - analyticsService.track("New Destination - Action", { - action: "Select a connector", - connector_destination_definition: connector?.name, + trackNewDestinationAction("Select a connector", { + connector_destination: connector?.name, connector_destination_definition_id: destinationDefinitionId, }); }; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/DestinationStep.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/DestinationStep.tsx index 571e784df22b65..01dab0e897964a 100644 --- a/airbyte-webapp/src/pages/OnboardingPage/components/DestinationStep.tsx +++ b/airbyte-webapp/src/pages/OnboardingPage/components/DestinationStep.tsx @@ -3,8 +3,8 @@ import { FormattedMessage } from "react-intl"; import { ConnectionConfiguration } from "core/domain/connection"; import { JobInfo } from "core/domain/job"; -import { useAnalyticsService } from "hooks/services/Analytics/useAnalyticsService"; import { useCreateDestination } from "hooks/services/useDestinationHook"; +import { TrackActionType, useTrackAction } from "hooks/useTrackAction"; import { useDestinationDefinitionList } from "services/connector/DestinationDefinitionService"; import { useGetDestinationDefinitionSpecificationAsync } from "services/connector/DestinationDefinitionSpecificationService"; import { createFormErrorMessage } from "utils/errorStatusMessage"; @@ -31,7 +31,7 @@ const DestinationStep: React.FC = ({ onNextStep, onSuccess }) => { } | null>(null); const { mutateAsync: createDestination } = useCreateDestination(); - const analyticsService = useAnalyticsService(); + const trackNewDestinationAction = useTrackAction(TrackActionType.NEW_DESTINATION); const getDestinationDefinitionById = (id: string) => destinationDefinitions.find((item) => item.destinationDefinitionId === id); @@ -64,8 +64,7 @@ const DestinationStep: React.FC = ({ onNextStep, onSuccess }) => { const onDropDownSelect = (destinationDefinitionId: string) => { const destinationConnector = getDestinationDefinitionById(destinationDefinitionId); - analyticsService.track("New Destination - Action", { - action: "Select a connector", + trackNewDestinationAction("Select a connector", { connector_destination: destinationConnector?.name, connector_destination_definition_id: destinationConnector?.destinationDefinitionId, }); diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/SourceStep.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/SourceStep.tsx index bbe0b95f619c69..30873022fd77d0 100644 --- a/airbyte-webapp/src/pages/OnboardingPage/components/SourceStep.tsx +++ b/airbyte-webapp/src/pages/OnboardingPage/components/SourceStep.tsx @@ -4,8 +4,8 @@ import { FormattedMessage } from "react-intl"; import { ConnectionConfiguration } from "core/domain/connection"; import { JobInfo } from "core/domain/job"; import { LogsRequestError } from "core/request/LogsRequestError"; -import { useAnalyticsService } from "hooks/services/Analytics/useAnalyticsService"; import { useCreateSource } from "hooks/services/useSourceHook"; +import { TrackActionType, useTrackAction } from "hooks/useTrackAction"; import { useSourceDefinitionList } from "services/connector/SourceDefinitionService"; import { useGetSourceDefinitionSpecificationAsync } from "services/connector/SourceDefinitionSpecificationService"; import { createFormErrorMessage } from "utils/errorStatusMessage"; @@ -31,7 +31,7 @@ const SourceStep: React.FC = ({ onNextStep, 
onSuccess }) => { const { mutateAsync: createSource } = useCreateSource(); - const analyticsService = useAnalyticsService(); + const trackNewSourceAction = useTrackAction(TrackActionType.NEW_SOURCE); const getSourceDefinitionById = (id: string) => sourceDefinitions.find((item) => item.sourceDefinitionId === id); @@ -64,10 +64,9 @@ const SourceStep: React.FC = ({ onNextStep, onSuccess }) => { const onServiceSelect = (sourceId: string) => { const sourceDefinition = getSourceDefinitionById(sourceId); - analyticsService.track("New Source - Action", { - action: "Select a connector", + trackNewSourceAction("Select a connector", { connector_source: sourceDefinition?.name, - connector_source_id: sourceDefinition?.sourceDefinitionId, + connector_source_definition_id: sourceDefinition?.sourceDefinitionId, }); setError(null); diff --git a/airbyte-webapp/src/pages/SourcesPage/pages/CreateSourcePage/components/SourceForm.tsx b/airbyte-webapp/src/pages/SourcesPage/pages/CreateSourcePage/components/SourceForm.tsx index d12423da874750..66bc00e4ce4510 100644 --- a/airbyte-webapp/src/pages/SourcesPage/pages/CreateSourcePage/components/SourceForm.tsx +++ b/airbyte-webapp/src/pages/SourcesPage/pages/CreateSourcePage/components/SourceForm.tsx @@ -4,8 +4,8 @@ import { FormattedMessage } from "react-intl"; import { ConnectionConfiguration } from "core/domain/connection"; import { SourceDefinition } from "core/domain/connector"; import { LogsRequestError } from "core/request/LogsRequestError"; -import { useAnalyticsService } from "hooks/services/Analytics/useAnalyticsService"; import useRouter from "hooks/useRouter"; +import { TrackActionType, useTrackAction } from "hooks/useTrackAction"; import { useGetSourceDefinitionSpecificationAsync } from "services/connector/SourceDefinitionSpecificationService"; import { createFormErrorMessage } from "utils/errorStatusMessage"; import { ConnectorCard } from "views/Connector/ConnectorCard"; @@ -34,7 +34,7 @@ const hasSourceDefinitionId = (state: unknown): state is { sourceDefinitionId: s const SourceForm: React.FC = ({ onSubmit, sourceDefinitions, error, hasSuccess, afterSelectConnector }) => { const { location } = useRouter(); - const analyticsService = useAnalyticsService(); + const trackNewSourceAction = useTrackAction(TrackActionType.NEW_SOURCE); const [sourceDefinitionId, setSourceDefinitionId] = useState( hasSourceDefinitionId(location.state) ? 
location.state.sourceDefinitionId : null @@ -54,9 +54,8 @@ const SourceForm: React.FC = ({ onSubmit, sourceDefinitions, error, hasS afterSelectConnector(); } - analyticsService.track("New Source - Action", { - action: "Select a connector", - connector_source_definition: connector?.name, + trackNewSourceAction("Select a connector", { + connector_source: connector?.name, connector_source_definition_id: sourceDefinitionId, }); }; diff --git a/airbyte-webapp/src/views/Connector/ConnectorCard/ConnectorCard.tsx b/airbyte-webapp/src/views/Connector/ConnectorCard/ConnectorCard.tsx index 952b0ac1526e7e..a6203af3205c99 100644 --- a/airbyte-webapp/src/views/Connector/ConnectorCard/ConnectorCard.tsx +++ b/airbyte-webapp/src/views/Connector/ConnectorCard/ConnectorCard.tsx @@ -7,7 +7,7 @@ import JobItem from "components/JobItem"; import { Connector, ConnectorT, Scheduler } from "core/domain/connector"; import { JobInfo } from "core/domain/job/Job"; import { LogsRequestError } from "core/request/LogsRequestError"; -import { useAnalytics } from "hooks/services/Analytics"; +import { TrackActionType, useTrackAction } from "hooks/useTrackAction"; import { createFormErrorMessage } from "utils/errorStatusMessage"; import { ServiceForm, ServiceFormProps, ServiceFormValues } from "views/Connector/ServiceForm"; @@ -39,33 +39,46 @@ const ConnectorCard: React.FC< const { testConnector, isTestConnectionInProgress, onStopTesting, error } = useTestConnector(props); - const analyticsService = useAnalytics().service; + const trackNewSourceAction = useTrackAction(TrackActionType.NEW_SOURCE); + const trackNewDestinationAction = useTrackAction(TrackActionType.NEW_DESTINATION); const onHandleSubmit = async (values: ServiceFormValues) => { setErrorStatusRequest(null); const connector = props.availableServices.find((item) => Connector.id(item) === values.serviceType); - try { - if (connector) { - if (props.formType === "source") { - analyticsService.track("New Source - Action", { - action: "Test a connector", - connector_source: connector?.name, - connector_source_definition_id: Connector.id(connector), - }); - } else { - analyticsService.track("New Destination - Action", { - action: "Test a connector", - connector_destination: connector?.name, - connector_destination_definition_id: Connector.id(connector), - }); - } + const trackAction = (action: string) => { + if (!connector) { + return; } - await testConnector(values); - await onSubmit(values); + if (props.formType === "source") { + trackNewSourceAction(action, { + connector_source: connector?.name, + connector_source_definition_id: Connector.id(connector), + }); + } else { + trackNewDestinationAction(action, { + connector_destination: connector?.name, + connector_destination_definition_id: Connector.id(connector), + }); + } + }; + const testConnectorWithTracking = async () => { + trackAction("Test a connector"); + try { + await testConnector(values); + trackAction("Tested connector - success"); + } catch (e) { + trackAction("Tested connector - failure"); + throw e; + } + }; + + try { + await testConnectorWithTracking(); + await onSubmit(values); setSaved(true); } catch (e) { setErrorStatusRequest(e); From 265eddcc085bb40a92a477c052a40a09d79fb73e Mon Sep 17 00:00:00 2001 From: Ari Bajo Date: Tue, 3 May 2022 21:25:18 +0200 Subject: [PATCH 066/152] Add Google Analytics tracking to docs (#12418) * Configure plugin-google-gtag * Fix curly brackets --- docusaurus/docusaurus.config.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docusaurus/docusaurus.config.js 
b/docusaurus/docusaurus.config.js index 7b51868d750bf9..2370a97cc9c48e 100644 --- a/docusaurus/docusaurus.config.js +++ b/docusaurus/docusaurus.config.js @@ -40,6 +40,10 @@ const config = { theme: { customCss: require.resolve('./src/css/custom.css'), }, + gtag: { + trackingID: 'UA-156258629-2', + anonymizeIP: true, + }, }), ], ], From e15ae56389dd63c8161df342b732b4fccf736003 Mon Sep 17 00:00:00 2001 From: LiRen Tu Date: Tue, 3 May 2022 13:45:02 -0700 Subject: [PATCH 067/152] Close all unsafe queries (#12495) * Add helper methods to return lists of json nodes * Close all unsafe queries * Add one more helper method * Simplify helper names * Format code --- .../java/io/airbyte/db/jdbc/JdbcDatabase.java | 46 ++++++++++++++--- .../db/jdbc/TestDefaultJdbcDatabase.java | 29 +++++------ .../db/jdbc/TestStreamingJdbcDatabase.java | 51 +++++++++---------- ...estinationStrictEncryptAcceptanceTest.java | 17 +++---- .../ClickhouseDestinationAcceptanceTest.java | 17 +++---- ...shClickhouseDestinationAcceptanceTest.java | 22 ++++---- .../MariadbColumnstoreSqlOperations.java | 25 ++++----- ...bColumnstoreDestinationAcceptanceTest.java | 33 +++++------- .../destination/mysql/MySQLSqlOperations.java | 17 +++---- ...trictEncryptDestinationAcceptanceTest.java | 16 +++--- .../NneOracleDestinationAcceptanceTest.java | 9 ++-- ...ryptedOracleDestinationAcceptanceTest.java | 4 +- .../source/clickhouse/ClickHouseSource.java | 8 ++- .../source/cockroachdb/CockroachDbSource.java | 8 +-- .../Db2Source.java | 8 +-- .../source/mssql/MssqlSource.java | 43 ++++++++-------- .../source/mysql/MySqlCdcTargetPosition.java | 28 +++++----- .../mysql/helpers/CdcConfigurationHelper.java | 50 +++++++++--------- .../OracleSourceNneAcceptanceTest.java | 8 +-- .../oracle/OracleSourceNneAcceptanceTest.java | 19 +++---- .../source/postgres/PostgresSource.java | 24 ++++----- 21 files changed, 236 insertions(+), 246 deletions(-) diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/jdbc/JdbcDatabase.java b/airbyte-db/lib/src/main/java/io/airbyte/db/jdbc/JdbcDatabase.java index 2170c847bd4559..2ace4f22321713 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/jdbc/JdbcDatabase.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/jdbc/JdbcDatabase.java @@ -44,9 +44,7 @@ public JdbcDatabase(final JdbcCompatibleSourceOperations sourceOperations) { @Override public void execute(final String sql) throws SQLException { - execute(connection -> { - connection.createStatement().execute(sql); - }); + execute(connection -> connection.createStatement().execute(sql)); } public void executeWithinTransaction(final List queries) throws SQLException { @@ -127,6 +125,18 @@ public abstract Stream unsafeResultSetQuery(CheckedFunction recordTransform) throws SQLException; + /** + * String query is a common use case for {@link JdbcDatabase#unsafeResultSetQuery}. So this method + * is created as syntactic sugar. + */ + public List queryStrings(final CheckedFunction query, + final CheckedFunction recordTransform) + throws SQLException { + try (final Stream stream = unsafeResultSetQuery(query, recordTransform)) { + return stream.toList(); + } + } + /** * Use a connection to create a {@link PreparedStatement} and map it into a stream. 
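 * (Editor's aside, not part of the original javadoc: generic type parameters throughout this
 * diff appear to have been stripped during extraction. Restored on a best-effort basis, the
 * helpers added by this patch plausibly read:
 *   public List<String> queryStrings(CheckedFunction<Connection, ResultSet, SQLException> query,
 *       CheckedFunction<ResultSet, String, SQLException> recordTransform) throws SQLException
 *   public List<JsonNode> queryJsons(String sql, String... params) throws SQLException
 * Each drains the returned stream inside try-with-resources so the underlying ResultSet is
 * always closed.)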
You CANNOT * assume that data will be returned from this method before the entire {@link ResultSet} is @@ -148,8 +158,21 @@ public abstract Stream unsafeQuery(CheckedFunction recordTransform) throws SQLException; + /** + * Json query is a common use case for + * {@link JdbcDatabase#unsafeQuery(CheckedFunction, CheckedFunction)}. So this method is created as + * syntactic sugar. + */ + public List queryJsons(final CheckedFunction statementCreator, + final CheckedFunction recordTransform) + throws SQLException { + try (final Stream stream = unsafeQuery(statementCreator, recordTransform)) { + return stream.toList(); + } + } + public int queryInt(final String sql, final String... params) throws SQLException { - try (final Stream q = unsafeQuery(c -> { + try (final Stream stream = unsafeQuery(c -> { PreparedStatement statement = c.prepareStatement(sql); int i = 1; for (String param : params) { @@ -157,9 +180,8 @@ public int queryInt(final String sql, final String... params) throws SQLExceptio ++i; } return statement; - }, - rs -> rs.getInt(1))) { - return q.findFirst().get(); + }, rs -> rs.getInt(1))) { + return stream.findFirst().get(); } } @@ -181,6 +203,16 @@ public Stream unsafeQuery(final String sql, final String... params) th }, sourceOperations::rowToJson); } + /** + * Json query is a common use case for {@link JdbcDatabase#unsafeQuery(String, String...)}. So this + * method is created as syntactic sugar. + */ + public List queryJsons(final String sql, final String... params) throws SQLException { + try (final Stream stream = unsafeQuery(sql, params)) { + return stream.toList(); + } + } + public ResultSetMetaData queryMetadata(final String sql, final String... params) throws SQLException { try (final Stream q = unsafeQuery(c -> { PreparedStatement statement = c.prepareStatement(sql); diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestDefaultJdbcDatabase.java b/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestDefaultJdbcDatabase.java index 43448e3d44d049..0661c5b29af72e 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestDefaultJdbcDatabase.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestDefaultJdbcDatabase.java @@ -16,7 +16,6 @@ import io.airbyte.test.utils.PostgreSQLContainerHelper; import java.sql.SQLException; import java.util.List; -import java.util.stream.Collectors; import java.util.stream.Stream; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; @@ -34,9 +33,8 @@ public class TestDefaultJdbcDatabase { Jsons.jsonNode(ImmutableMap.of("id", 3, "name", "vash"))); private static PostgreSQLContainer PSQL_DB; - - private JdbcDatabase database; private final JdbcSourceOperations sourceOperations = JdbcUtils.getDefaultSourceOperations(); + private JdbcDatabase database; @BeforeAll static void init() { @@ -44,6 +42,11 @@ static void init() { PSQL_DB.start(); } + @AfterAll + static void cleanUp() { + PSQL_DB.close(); + } + @BeforeEach void setup() throws Exception { final String dbName = Strings.addRandomSuffix("db", "_", 10); @@ -65,11 +68,6 @@ void close() throws Exception { database.close(); } - @AfterAll - static void cleanUp() { - PSQL_DB.close(); - } - @Test void testBufferedResultQuery() throws SQLException { final List actual = database.bufferedResultSetQuery( @@ -81,22 +79,19 @@ void testBufferedResultQuery() throws SQLException { @Test void testResultSetQuery() throws SQLException { - final Stream actual = database.unsafeResultSetQuery( + try (final Stream actual = database.unsafeResultSetQuery( connection -> 
connection.createStatement().executeQuery("SELECT * FROM id_and_name;"), - sourceOperations::rowToJson); - final List actualAsList = actual.collect(Collectors.toList()); - actual.close(); - - assertEquals(RECORDS_AS_JSON, actualAsList); + sourceOperations::rowToJson)) { + assertEquals(RECORDS_AS_JSON, actual.toList()); + } } @Test void testQuery() throws SQLException { - final Stream actual = database.unsafeQuery( + final List actual = database.queryJsons( connection -> connection.prepareStatement("SELECT * FROM id_and_name;"), sourceOperations::rowToJson); - - assertEquals(RECORDS_AS_JSON, actual.collect(Collectors.toList())); + assertEquals(RECORDS_AS_JSON, actual); } private JdbcDatabase getDatabaseFromConfig(final JsonNode config) { diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestStreamingJdbcDatabase.java b/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestStreamingJdbcDatabase.java index e5e8b350755112..cac812dd71cda1 100644 --- a/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestStreamingJdbcDatabase.java +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/jdbc/TestStreamingJdbcDatabase.java @@ -24,7 +24,6 @@ import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; -import java.util.stream.Stream; import org.apache.commons.dbcp2.BasicDataSource; import org.elasticsearch.common.collect.Map; import org.junit.jupiter.api.AfterAll; @@ -95,20 +94,17 @@ void testQuery() throws SQLException { // invoked. final AtomicReference connection1 = new AtomicReference<>(); final AtomicReference ps1 = new AtomicReference<>(); - try (final Stream actual = streamingJdbcDatabase.unsafeQuery( - connection -> { - connection1.set(connection); - final PreparedStatement ps = connection.prepareStatement("SELECT * FROM id_and_name;"); - ps1.set(ps); - return ps; - }, - sourceOperations::rowToJson)) { - final List expectedRecords = Lists.newArrayList( - Jsons.jsonNode(Map.of("id", 1, "name", "picard")), - Jsons.jsonNode(Map.of("id", 2, "name", "crusher")), - Jsons.jsonNode(Map.of("id", 3, "name", "vash"))); - assertEquals(expectedRecords, actual.toList()); - } + final List actual = streamingJdbcDatabase.queryJsons(connection -> { + connection1.set(connection); + final PreparedStatement ps = connection.prepareStatement("SELECT * FROM id_and_name;"); + ps1.set(ps); + return ps; + }, sourceOperations::rowToJson); + final List expectedRecords = Lists.newArrayList( + Jsons.jsonNode(Map.of("id", 1, "name", "picard")), + Jsons.jsonNode(Map.of("id", 2, "name", "crusher")), + Jsons.jsonNode(Map.of("id", 3, "name", "vash"))); + assertEquals(expectedRecords, actual); } /** @@ -131,7 +127,7 @@ void testLargeRow() throws SQLException { final AtomicReference connection1 = new AtomicReference<>(); final AtomicReference ps1 = new AtomicReference<>(); final Set fetchSizes = new HashSet<>(); - try (final Stream actual = streamingJdbcDatabase.unsafeQuery( + final List actual = streamingJdbcDatabase.queryJsons( connection -> { connection1.set(connection); final PreparedStatement ps = connection.prepareStatement("SELECT * FROM id_and_name;"); @@ -141,18 +137,17 @@ void testLargeRow() throws SQLException { resultSet -> { fetchSizes.add(resultSet.getFetchSize()); return sourceOperations.rowToJson(resultSet); - })) { - assertEquals(20, actual.count()); - - // Two fetch sizes should be set on the result set, one is the initial sample size, - // and the other is smaller than the initial value because of the large row. 
- // This check assumes that FetchSizeConstants.TARGET_BUFFER_BYTE_SIZE = 200 MB. - // Update this check if the buffer size constant is changed. - assertEquals(2, fetchSizes.size()); - final List sortedSizes = fetchSizes.stream().sorted().toList(); - assertTrue(sortedSizes.get(0) < FetchSizeConstants.INITIAL_SAMPLE_SIZE); - assertEquals(FetchSizeConstants.INITIAL_SAMPLE_SIZE, sortedSizes.get(1)); - } + }); + assertEquals(20, actual.size()); + + // Two fetch sizes should be set on the result set, one is the initial sample size, + // and the other is smaller than the initial value because of the large row. + // This check assumes that FetchSizeConstants.TARGET_BUFFER_BYTE_SIZE = 200 MB. + // Update this check if the buffer size constant is changed. + assertEquals(2, fetchSizes.size()); + final List sortedSizes = fetchSizes.stream().sorted().toList(); + assertTrue(sortedSizes.get(0) < FetchSizeConstants.INITIAL_SAMPLE_SIZE); + assertEquals(FetchSizeConstants.INITIAL_SAMPLE_SIZE, sortedSizes.get(1)); } private JsonNode getConfig(final PostgreSQLContainer psqlDb, final String dbName) { diff --git a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationStrictEncryptAcceptanceTest.java b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationStrictEncryptAcceptanceTest.java index d423eb2f9a6db8..ee22f15849f7ee 100644 --- a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationStrictEncryptAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationStrictEncryptAcceptanceTest.java @@ -97,10 +97,10 @@ protected List retrieveNormalizedRecords(final TestDestinationEnv test } @Override - protected List retrieveRecords(TestDestinationEnv testEnv, - String streamName, - String namespace, - JsonNode streamSchema) + protected List retrieveRecords(final TestDestinationEnv testEnv, + final String streamName, + final String namespace, + final JsonNode streamSchema) throws Exception { return retrieveRecordsFromTable(namingResolver.getRawTableName(streamName), namespace) .stream() @@ -110,9 +110,8 @@ protected List retrieveRecords(TestDestinationEnv testEnv, private List retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException { final JdbcDatabase jdbcDB = getDatabase(getConfig()); - return jdbcDB.unsafeQuery(String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, - JavaBaseConstants.COLUMN_NAME_EMITTED_AT)) - .collect(Collectors.toList()); + final String query = String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, JavaBaseConstants.COLUMN_NAME_EMITTED_AT); + return jdbcDB.queryJsons(query); } @Override @@ -141,7 +140,7 @@ private static JdbcDatabase getDatabase(final JsonNode config) { } @Override - protected void setup(TestDestinationEnv testEnv) { + protected void setup(final TestDestinationEnv testEnv) { db = (ClickHouseContainer) new ClickHouseContainer("yandex/clickhouse-server") .withExposedPorts(HTTP_PORT, NATIVE_PORT, HTTPS_PORT, NATIVE_SECURE_PORT) .withClasspathResourceMapping("config.xml", "/etc/clickhouse-server/config.xml", BindMode.READ_ONLY) @@ -156,7 +155,7 @@ 
protected void setup(TestDestinationEnv testEnv) { } @Override - protected void tearDown(TestDestinationEnv testEnv) { + protected void tearDown(final TestDestinationEnv testEnv) { db.stop(); db.close(); } diff --git a/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationAcceptanceTest.java index d897950a23edd1..a1a16aff1bcbab 100644 --- a/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationAcceptanceTest.java @@ -88,10 +88,10 @@ protected List retrieveNormalizedRecords(final TestDestinationEnv test } @Override - protected List retrieveRecords(TestDestinationEnv testEnv, - String streamName, - String namespace, - JsonNode streamSchema) + protected List retrieveRecords(final TestDestinationEnv testEnv, + final String streamName, + final String namespace, + final JsonNode streamSchema) throws Exception { return retrieveRecordsFromTable(namingResolver.getRawTableName(streamName), namespace) .stream() @@ -101,9 +101,8 @@ protected List retrieveRecords(TestDestinationEnv testEnv, private List retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException { final JdbcDatabase jdbcDB = getDatabase(getConfig()); - return jdbcDB.unsafeQuery(String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, - JavaBaseConstants.COLUMN_NAME_EMITTED_AT)) - .collect(Collectors.toList()); + final String query = String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, JavaBaseConstants.COLUMN_NAME_EMITTED_AT); + return jdbcDB.queryJsons(query); } @Override @@ -131,13 +130,13 @@ private static JdbcDatabase getDatabase(final JsonNode config) { } @Override - protected void setup(TestDestinationEnv testEnv) { + protected void setup(final TestDestinationEnv testEnv) { db = new ClickHouseContainer("yandex/clickhouse-server"); db.start(); } @Override - protected void tearDown(TestDestinationEnv testEnv) { + protected void tearDown(final TestDestinationEnv testEnv) { db.stop(); db.close(); } diff --git a/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/SshClickhouseDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/SshClickhouseDestinationAcceptanceTest.java index dd6b640fdfb86d..a6ab946dfdb7ba 100644 --- a/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/SshClickhouseDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-clickhouse/src/test-integration/java/io/airbyte/integrations/destination/clickhouse/SshClickhouseDestinationAcceptanceTest.java @@ -6,7 +6,6 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.commons.functional.CheckedFunction; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Databases; import io.airbyte.db.jdbc.JdbcDatabase; @@ -86,10 
+85,10 @@ protected List retrieveNormalizedRecords(final TestDestinationEnv test } @Override - protected List retrieveRecords(TestDestinationEnv testEnv, - String streamName, - String namespace, - JsonNode streamSchema) + protected List retrieveRecords(final TestDestinationEnv testEnv, + final String streamName, + final String namespace, + final JsonNode streamSchema) throws Exception { return retrieveRecordsFromTable(namingResolver.getRawTableName(streamName), namespace) .stream() @@ -102,10 +101,11 @@ private List retrieveRecordsFromTable(final String tableName, final St getConfig(), ClickhouseDestination.HOST_KEY, ClickhouseDestination.PORT_KEY, - (CheckedFunction, Exception>) mangledConfig -> getDatabase(mangledConfig) - .unsafeQuery(String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, - JavaBaseConstants.COLUMN_NAME_EMITTED_AT)) - .collect(Collectors.toList())); + mangledConfig -> { + final JdbcDatabase database = getDatabase(mangledConfig); + final String query = String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, JavaBaseConstants.COLUMN_NAME_EMITTED_AT); + return database.queryJsons(query); + }); } @Override @@ -133,14 +133,14 @@ private static JdbcDatabase getDatabase(final JsonNode config) { } @Override - protected void setup(TestDestinationEnv testEnv) { + protected void setup(final TestDestinationEnv testEnv) { bastion.initAndStartBastion(); db = (ClickHouseContainer) new ClickHouseContainer("yandex/clickhouse-server").withNetwork(bastion.getNetWork()); db.start(); } @Override - protected void tearDown(TestDestinationEnv testEnv) { + protected void tearDown(final TestDestinationEnv testEnv) { bastion.stopAndCloseContainers(db); } diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreSqlOperations.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreSqlOperations.java index 4460ca17e08519..6dd6c3abc753b0 100644 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreSqlOperations.java +++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreSqlOperations.java @@ -17,13 +17,9 @@ import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; public class MariadbColumnstoreSqlOperations extends JdbcSqlOperations { - private static final Logger LOGGER = LoggerFactory.getLogger(MariadbColumnstoreSqlOperations.class); private final String MINIMUM_VERSION = "5.5.3"; Pattern VERSION_PATTERN = Pattern.compile("^(\\d+\\.\\d+\\.\\d+)-MariaDB"); private boolean isLocalFileEnabled = false; @@ -70,7 +66,7 @@ public void insertRecordsInternal(final JdbcDatabase database, @Override public void executeTransaction(final JdbcDatabase database, final List queries) throws Exception { database.execute(connection -> { - try (Statement stmt = connection.createStatement()) { + try (final Statement stmt = connection.createStatement()) { stmt.addBatch("BEGIN;"); for (final String query : queries) { stmt.addBatch(query); @@ -103,13 +99,15 @@ VersionCompatibility isCompatibleVersion(final JdbcDatabase database) throws SQL } private 
Semver getVersion(final JdbcDatabase database) throws SQLException { - final List value = database.unsafeResultSetQuery(connection -> connection.createStatement().executeQuery("SELECT version()"), - resultSet -> resultSet.getString("version()")).collect(Collectors.toList()); - Matcher matcher = VERSION_PATTERN.matcher(value.get(0)); + final List versions = database.queryStrings( + connection -> connection.createStatement().executeQuery("SELECT version()"), + resultSet -> resultSet.getString("version()")); + + final Matcher matcher = VERSION_PATTERN.matcher(versions.get(0)); if (matcher.find()) { return new Semver(matcher.group(1)); } else { - throw new RuntimeException(String.format("Unexpected version string: %s\nExpected version format is X.X.X-MariaDB", value.get(0))); + throw new RuntimeException(String.format("Unexpected version string: %s\nExpected version format is X.X.X-MariaDB", versions.get(0))); } } @@ -122,11 +120,10 @@ void verifyLocalFileEnabled(final JdbcDatabase database) throws SQLException { } private boolean checkIfLocalFileIsEnabled(final JdbcDatabase database) throws SQLException { - final List value = - database.unsafeResultSetQuery(connection -> connection.createStatement().executeQuery("SHOW GLOBAL VARIABLES LIKE 'local_infile'"), - resultSet -> resultSet.getString("Value")).collect(Collectors.toList()); - - return value.get(0).equalsIgnoreCase("on"); + final List localFiles = database.queryStrings( + connection -> connection.createStatement().executeQuery("SHOW GLOBAL VARIABLES LIKE 'local_infile'"), + resultSet -> resultSet.getString("Value")); + return localFiles.get(0).equalsIgnoreCase("on"); } private void tryEnableLocalFile(final JdbcDatabase database) throws SQLException { diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java index 8098ab53ae4564..c2b193d22805d4 100644 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java @@ -17,19 +17,13 @@ import java.sql.SQLException; import java.util.List; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.testcontainers.containers.MariaDBContainer; import org.testcontainers.utility.DockerImageName; public class MariadbColumnstoreDestinationAcceptanceTest extends DestinationAcceptanceTest { - private static final Logger LOGGER = LoggerFactory.getLogger(MariadbColumnstoreDestinationAcceptanceTest.class); - private final ExtendedNameTransformer namingResolver = new MariadbColumnstoreNameTransformer(); - private JsonNode configJson; - private MariaDBContainer db; @Override @@ -89,10 +83,10 @@ protected boolean supportObjectDataTypeTest() { } @Override - protected List retrieveRecords(TestDestinationEnv testEnv, - String streamName, - String namespace, - JsonNode streamSchema) + protected List retrieveRecords(final TestDestinationEnv testEnv, + final String streamName, + final String 
namespace, + final JsonNode streamSchema) throws Exception { return retrieveRecordsFromTable(namingResolver.getRawTableName(streamName), namespace) .stream() @@ -101,10 +95,9 @@ protected List retrieveRecords(TestDestinationEnv testEnv, } private List retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException { - JdbcDatabase database = getDatabase(getConfig()); - return database.unsafeQuery(String.format("SELECT * FROM %s.%s ORDER BY %s ASC;", schemaName, tableName, - JavaBaseConstants.COLUMN_NAME_EMITTED_AT)) - .collect(Collectors.toList()); + final JdbcDatabase database = getDatabase(getConfig()); + final String query = String.format("SELECT * FROM %s.%s ORDER BY %s ASC;", schemaName, tableName, JavaBaseConstants.COLUMN_NAME_EMITTED_AT); + return database.queryJsons(query); } private static JdbcDatabase getDatabase(final JsonNode config) { @@ -119,19 +112,19 @@ private static JdbcDatabase getDatabase(final JsonNode config) { } @Override - protected void setup(TestDestinationEnv testEnv) throws Exception { - DockerImageName mcsImage = DockerImageName.parse("fengdi/columnstore:1.5.2").asCompatibleSubstituteFor("mariadb"); + protected void setup(final TestDestinationEnv testEnv) throws Exception { + final DockerImageName mcsImage = DockerImageName.parse("fengdi/columnstore:1.5.2").asCompatibleSubstituteFor("mariadb"); db = new MariaDBContainer(mcsImage); db.start(); - String createUser = String.format("CREATE USER '%s'@'%%' IDENTIFIED BY '%s';", db.getUsername(), db.getPassword()); - String grantAll = String.format("GRANT ALL PRIVILEGES ON *.* TO '%s'@'%%' IDENTIFIED BY '%s';", db.getUsername(), db.getPassword()); - String createDb = String.format("CREATE DATABASE %s DEFAULT CHARSET = utf8;", db.getDatabaseName()); + final String createUser = String.format("CREATE USER '%s'@'%%' IDENTIFIED BY '%s';", db.getUsername(), db.getPassword()); + final String grantAll = String.format("GRANT ALL PRIVILEGES ON *.* TO '%s'@'%%' IDENTIFIED BY '%s';", db.getUsername(), db.getPassword()); + final String createDb = String.format("CREATE DATABASE %s DEFAULT CHARSET = utf8;", db.getDatabaseName()); db.execInContainer("mariadb", "-e", createUser + grantAll + createDb); } @Override - protected void tearDown(TestDestinationEnv testEnv) { + protected void tearDown(final TestDestinationEnv testEnv) { db.stop(); db.close(); } diff --git a/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLSqlOperations.java b/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLSqlOperations.java index 181fb8d8bf9424..78957bf88ac249 100644 --- a/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLSqlOperations.java +++ b/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLSqlOperations.java @@ -16,7 +16,6 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.List; -import java.util.stream.Collectors; public class MySQLSqlOperations extends JdbcSqlOperations { @@ -100,9 +99,10 @@ private void tryEnableLocalFile(final JdbcDatabase database) throws SQLException } private double getVersion(final JdbcDatabase database) throws SQLException { - final List value = database.unsafeResultSetQuery(connection -> connection.createStatement().executeQuery("select version()"), - resultSet -> resultSet.getString("version()")).collect(Collectors.toList()); - 
return Double.parseDouble(value.get(0).substring(0, 3)); + final List versions = database.queryStrings( + connection -> connection.createStatement().executeQuery("select version()"), + resultSet -> resultSet.getString("version()")); + return Double.parseDouble(versions.get(0).substring(0, 3)); } VersionCompatibility isCompatibleVersion(final JdbcDatabase database) throws SQLException { @@ -116,11 +116,10 @@ public boolean isSchemaRequired() { } private boolean checkIfLocalFileIsEnabled(final JdbcDatabase database) throws SQLException { - final List value = - database.unsafeResultSetQuery(connection -> connection.createStatement().executeQuery("SHOW GLOBAL VARIABLES LIKE 'local_infile'"), - resultSet -> resultSet.getString("Value")).collect(Collectors.toList()); - - return value.get(0).equalsIgnoreCase("on"); + final List localFiles = database.queryStrings( + connection -> connection.createStatement().executeQuery("SHOW GLOBAL VARIABLES LIKE 'local_infile'"), + resultSet -> resultSet.getString("Value")); + return localFiles.get(0).equalsIgnoreCase("on"); } @Override diff --git a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java index 6cfde5013241d5..be74f4c06cffb6 100644 --- a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java @@ -112,12 +112,8 @@ protected List resolveIdentifier(final String identifier) { private List retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException { - final List result = getDatabase(config) - .query(ctx -> ctx.fetch( - String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, - OracleDestination.COLUMN_NAME_EMITTED_AT)) - .stream() - .collect(Collectors.toList())); + final String query = String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, OracleDestination.COLUMN_NAME_EMITTED_AT); + final List result = getDatabase(config).query(ctx -> ctx.fetch(query).stream().toList()); return result .stream() .map(r -> r.formatJSON(JSON_FORMAT)) @@ -180,9 +176,9 @@ public void testEncryption() throws SQLException { JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" + "oracle.net.encryption_types_client=( " + algorithm + " )", ";")); - final String network_service_banner = + final String networkServiceBanner = "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)"; - final List collect = database.unsafeQuery(network_service_banner).collect(Collectors.toList()); + final List collect = database.queryJsons(networkServiceBanner); assertThat(collect.get(2).get("NETWORK_SERVICE_BANNER").asText(), equals("Oracle Advanced Security: " + algorithm + " encryption")); @@ -205,8 +201,8 @@ public void testCheckProtocol() throws SQLException { JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" + 
"oracle.net.encryption_types_client=( " + algorithm + " )", ";")); - final String network_service_banner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual"; - final List collect = database.unsafeQuery(network_service_banner).collect(Collectors.toList()); + final String networkServiceBanner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual"; + final List collect = database.queryJsons(networkServiceBanner); assertEquals("tcp", collect.get(0).get("NETWORK_PROTOCOL").asText()); } diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/NneOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/NneOracleDestinationAcceptanceTest.java index 8a65723e2a0c00..69794489206995 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/NneOracleDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/NneOracleDestinationAcceptanceTest.java @@ -16,7 +16,6 @@ import java.sql.SQLException; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; import org.junit.Test; public class NneOracleDestinationAcceptanceTest extends UnencryptedOracleDestinationAcceptanceTest { @@ -40,9 +39,9 @@ public void testEncryption() throws SQLException { "oracle.jdbc.driver.OracleDriver", getAdditionalProperties(algorithm)); - final String network_service_banner = + final String networkServiceBanner = "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)"; - final List collect = database.unsafeQuery(network_service_banner).toList(); + final List collect = database.queryJsons(networkServiceBanner); assertThat(collect.get(2).get("NETWORK_SERVICE_BANNER").asText(), equals("Oracle Advanced Security: " + algorithm + " encryption")); @@ -73,8 +72,8 @@ public void testCheckProtocol() throws SQLException { "oracle.jdbc.driver.OracleDriver", getAdditionalProperties(algorithm)); - final String network_service_banner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual"; - final List collect = database.unsafeQuery(network_service_banner).collect(Collectors.toList()); + final String networkServiceBanner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual"; + final List collect = database.queryJsons(networkServiceBanner); assertEquals("tcp", collect.get(0).get("NETWORK_PROTOCOL").asText()); } diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java index 1342c57dafd375..1ec98f5aab56ef 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java @@ -180,9 +180,9 @@ public void testNoneEncryption() throws 
SQLException { config.get("sid").asText()), "oracle.jdbc.driver.OracleDriver"); - final String network_service_banner = + final String networkServiceBanner = "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)"; - final List collect = database.unsafeQuery(network_service_banner).collect(Collectors.toList()); + final List collect = database.queryJsons(networkServiceBanner); assertTrue(collect.get(1).get("NETWORK_SERVICE_BANNER").asText() .contains("Oracle Advanced Security: encryption")); diff --git a/airbyte-integrations/connectors/source-clickhouse/src/main/java/io/airbyte/integrations/source/clickhouse/ClickHouseSource.java b/airbyte-integrations/connectors/source-clickhouse/src/main/java/io/airbyte/integrations/source/clickhouse/ClickHouseSource.java index 3fd083e4387986..abc10ec86ef7ac 100644 --- a/airbyte-integrations/connectors/source-clickhouse/src/main/java/io/airbyte/integrations/source/clickhouse/ClickHouseSource.java +++ b/airbyte-integrations/connectors/source-clickhouse/src/main/java/io/airbyte/integrations/source/clickhouse/ClickHouseSource.java @@ -46,18 +46,16 @@ protected Map> discoverPrimaryKeys(final JdbcDatabase datab final List>> tableInfos) { return tableInfos.stream() .collect(Collectors.toMap( - tableInfo -> sourceOperations - .getFullyQualifiedTableName(tableInfo.getNameSpace(), tableInfo.getName()), + tableInfo -> sourceOperations.getFullyQualifiedTableName(tableInfo.getNameSpace(), tableInfo.getName()), tableInfo -> { try { - return database.unsafeResultSetQuery(connection -> { + return database.queryStrings(connection -> { final String sql = "SELECT name FROM system.columns WHERE database = ? AND table = ? AND is_in_primary_key = 1"; final PreparedStatement preparedStatement = connection.prepareStatement(sql); preparedStatement.setString(1, tableInfo.getNameSpace()); preparedStatement.setString(2, tableInfo.getName()); return preparedStatement.executeQuery(); - - }, resultSet -> resultSet.getString("name")).collect(Collectors.toList()); + }, resultSet -> resultSet.getString("name")); } catch (final SQLException e) { throw new RuntimeException(e); } diff --git a/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSource.java b/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSource.java index dfe64063cf5923..571091d4046723 100644 --- a/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSource.java +++ b/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSource.java @@ -29,6 +29,7 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import java.util.stream.Stream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -101,10 +102,9 @@ public AutoCloseableIterator read(final JsonNode config, @Override public Set getPrivilegesTableForCurrentUser(final JdbcDatabase database, final String schema) throws SQLException { - return database - .unsafeQuery(getPrivileges(database), sourceOperations::rowToJson) - .map(this::getPrivilegeDto) - .collect(Collectors.toSet()); + try (final Stream stream = database.unsafeQuery(getPrivileges(database), sourceOperations::rowToJson)) { + return stream.map(this::getPrivilegeDto).collect(Collectors.toSet()); + } } @Override diff --git 
a/airbyte-integrations/connectors/source-db2/src/main/java/io.airbyte.integrations.source.db2/Db2Source.java b/airbyte-integrations/connectors/source-db2/src/main/java/io.airbyte.integrations.source.db2/Db2Source.java index f48de8c9e2dc1a..d817b10f6acfdc 100644 --- a/airbyte-integrations/connectors/source-db2/src/main/java/io.airbyte.integrations.source.db2/Db2Source.java +++ b/airbyte-integrations/connectors/source-db2/src/main/java/io.airbyte.integrations.source.db2/Db2Source.java @@ -26,6 +26,7 @@ import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import java.util.stream.Stream; import org.apache.commons.lang3.RandomStringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -89,10 +90,9 @@ public Set getExcludedInternalNameSpaces() { @Override public Set getPrivilegesTableForCurrentUser(final JdbcDatabase database, final String schema) throws SQLException { - return database - .unsafeQuery(getPrivileges(), sourceOperations::rowToJson) - .map(this::getPrivilegeDto) - .collect(Collectors.toSet()); + try (final Stream stream = database.unsafeQuery(getPrivileges(), sourceOperations::rowToJson)) { + return stream.map(this::getPrivilegeDto).collect(Collectors.toSet()); + } } @Override diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java index 3af72f59dea6be..d8b97821ea4e93 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java @@ -100,15 +100,12 @@ public AutoCloseableIterator queryTableIncremental(final JdbcDatabase connection -> { LOGGER.info("Preparing query for table: {}", tableName); - final String identifierQuoteString = connection.getMetaData() - .getIdentifierQuoteString(); - final List newColumnNames = getWrappedColumn(database, - columnNames, schemaName, tableName, identifierQuoteString); + final String identifierQuoteString = connection.getMetaData().getIdentifierQuoteString(); + final List newColumnNames = getWrappedColumn(database, columnNames, schemaName, tableName, identifierQuoteString); final String sql = String.format("SELECT %s FROM %s WHERE %s > ?", String.join(",", newColumnNames), - sourceOperations - .getFullyQualifiedTableNameWithQuoting(connection, schemaName, tableName), + sourceOperations.getFullyQualifiedTableNameWithQuoting(connection, schemaName, tableName), sourceOperations.enquoteIdentifier(connection, cursorField)); LOGGER.info("Prepared SQL query for queryTableIncremental is: " + sql); @@ -251,15 +248,15 @@ public List> getCheckOperations(final J protected void assertCdcEnabledInDb(final JsonNode config, final JdbcDatabase database) throws SQLException { - final List queryResponse = database.unsafeQuery(connection -> { + final List queryResponse = database.queryJsons(connection -> { final String sql = "SELECT name, is_cdc_enabled FROM sys.databases WHERE name = ?"; final PreparedStatement ps = connection.prepareStatement(sql); ps.setString(1, config.get("database").asText()); - LOGGER - .info(String.format("Checking that cdc is enabled on database '%s' using the query: '%s'", - config.get("database").asText(), sql)); + LOGGER.info(String.format("Checking that cdc is enabled on database '%s' using the query: '%s'", + 
+          config.get("database").asText(), sql));
       return ps;
-    }, sourceOperations::rowToJson).collect(toList());
+    }, sourceOperations::rowToJson);
+
     if (queryResponse.size() < 1) {
       throw new RuntimeException(String.format(
           "Couldn't find '%s' in sys.databases table. Please check the spelling and that the user has relevant permissions (see docs).",
@@ -274,15 +271,15 @@ protected void assertCdcEnabledInDb(final JsonNode config, final JdbcDatabase da
 
   protected void assertCdcSchemaQueryable(final JsonNode config, final JdbcDatabase database)
       throws SQLException {
-    final List<JsonNode> queryResponse = database.unsafeQuery(connection -> {
-      final String sql =
-          "USE " + config.get("database").asText() + "; SELECT * FROM cdc.change_tables";
+    final List<JsonNode> queryResponse = database.queryJsons(connection -> {
+      final String sql = "USE " + config.get("database").asText() + "; SELECT * FROM cdc.change_tables";
       final PreparedStatement ps = connection.prepareStatement(sql);
       LOGGER.info(String.format(
           "Checking user '%s' can query the cdc schema and that we have at least 1 cdc enabled table using the query: '%s'",
           config.get("username").asText(), sql));
       return ps;
-    }, sourceOperations::rowToJson).collect(toList());
+    }, sourceOperations::rowToJson);
+
     // Ensure at least one available CDC table
     if (queryResponse.size() < 1) {
       throw new RuntimeException(
@@ -293,22 +290,21 @@ protected void assertCdcSchemaQueryable(final JsonNode config, final JdbcDatabas
 
   // todo: ensure this works for Azure managed SQL (since it uses different sql server agent)
   protected void assertSqlServerAgentRunning(final JdbcDatabase database) throws SQLException {
     try {
-      final List<JsonNode> queryResponse = database.unsafeQuery(connection -> {
+      final List<JsonNode> queryResponse = database.queryJsons(connection -> {
         final String sql = "SELECT status_desc FROM sys.dm_server_services WHERE [servicename] LIKE 'SQL Server Agent%' OR [servicename] LIKE 'SQL Server 代理%' ";
         final PreparedStatement ps = connection.prepareStatement(sql);
-        LOGGER.info(String
-            .format("Checking that the SQL Server Agent is running using the query: '%s'", sql));
+        LOGGER.info(String.format("Checking that the SQL Server Agent is running using the query: '%s'", sql));
         return ps;
-      }, sourceOperations::rowToJson).collect(toList());
+      }, sourceOperations::rowToJson);
+
       if (!(queryResponse.get(0).get("status_desc").toString().contains("Running"))) {
         throw new RuntimeException(String.format(
             "The SQL Server Agent is not running. Current state: '%s'. Please check the documentation on ensuring SQL Server Agent is running.",
            queryResponse.get(0).get("status_desc").toString()));
       }
     } catch (final Exception e) {
-      if (e.getCause() != null && e.getCause().getClass()
-          .equals(com.microsoft.sqlserver.jdbc.SQLServerException.class)) {
+      if (e.getCause() != null && e.getCause().getClass().equals(com.microsoft.sqlserver.jdbc.SQLServerException.class)) {
         LOGGER.warn(String.format(
             "Skipping check for whether the SQL Server Agent is running, SQLServerException thrown: '%s'",
             e.getMessage()));
@@ -320,7 +316,7 @@ protected void assertSqlServerAgentRunning(final JdbcDatabase database) throws S
 
   protected void assertSnapshotIsolationAllowed(final JsonNode config, final JdbcDatabase database)
       throws SQLException {
-    final List<JsonNode> queryResponse = database.unsafeQuery(connection -> {
+    final List<JsonNode> queryResponse = database.queryJsons(connection -> {
       final String sql = "SELECT name, snapshot_isolation_state FROM sys.databases WHERE name = ?";
       final PreparedStatement ps = connection.prepareStatement(sql);
       ps.setString(1, config.get("database").asText());
@@ -328,7 +324,8 @@ protected void assertSnapshotIsolationAllowed(final JsonNode config, final JdbcD
           "Checking that snapshot isolation is enabled on database '%s' using the query: '%s'",
           config.get("database").asText(), sql));
       return ps;
-    }, sourceOperations::rowToJson).collect(toList());
+    }, sourceOperations::rowToJson);
+
     if (queryResponse.size() < 1) {
       throw new RuntimeException(String.format(
           "Couldn't find '%s' in sys.databases table. Please check the spelling and that the user has relevant permissions (see docs).",
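The four MSSQL assertions in this diff map one-to-one onto queries an operator can run by hand. The SQL below is lifted from the hunks, with `my_database` standing in for the configured database name:

```sql
-- assertCdcEnabledInDb: is CDC enabled on the database?
SELECT name, is_cdc_enabled FROM sys.databases WHERE name = 'my_database';

-- assertCdcSchemaQueryable: can this user see at least one CDC-enabled table?
USE my_database; SELECT * FROM cdc.change_tables;

-- assertSqlServerAgentRunning: is the SQL Server Agent service up?
SELECT status_desc FROM sys.dm_server_services WHERE [servicename] LIKE 'SQL Server Agent%';

-- assertSnapshotIsolationAllowed: is snapshot isolation enabled?
SELECT name, snapshot_isolation_state FROM sys.databases WHERE name = 'my_database';
```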
diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlCdcTargetPosition.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlCdcTargetPosition.java
index de02f827e0e6c6..4c5901676e187c 100644
--- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlCdcTargetPosition.java
+++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlCdcTargetPosition.java
@@ -11,7 +11,7 @@
 import java.sql.SQLException;
 import java.util.List;
 import java.util.Objects;
-import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -28,8 +28,7 @@ public MySqlCdcTargetPosition(final String fileName, final Integer position) {
 
   @Override
   public boolean equals(final Object obj) {
-    if (obj instanceof MySqlCdcTargetPosition) {
-      final MySqlCdcTargetPosition cdcTargetPosition = (MySqlCdcTargetPosition) obj;
+    if (obj instanceof final MySqlCdcTargetPosition cdcTargetPosition) {
       return fileName.equals(cdcTargetPosition.fileName) && cdcTargetPosition.position.equals(position);
     }
     return false;
@@ -46,20 +45,19 @@ public String toString() {
   }
 
   public static MySqlCdcTargetPosition targetPosition(final JdbcDatabase database) {
-    try {
-      final List<MySqlCdcTargetPosition> masterStatus = database.unsafeResultSetQuery(
-          connection -> connection.createStatement().executeQuery("SHOW MASTER STATUS"),
-          resultSet -> {
-            final String file = resultSet.getString("File");
-            final int position = resultSet.getInt("Position");
-            if (file == null || position == 0) {
-              return new MySqlCdcTargetPosition(null, null);
-            }
-            return new MySqlCdcTargetPosition(file, position);
-          }).collect(Collectors.toList());
+    try (final Stream<MySqlCdcTargetPosition> stream = database.unsafeResultSetQuery(
+        connection -> connection.createStatement().executeQuery("SHOW MASTER STATUS"),
+        resultSet -> {
+          final String file = resultSet.getString("File");
+          final int position = resultSet.getInt("Position");
+          if (file == null || position == 0) {
+            return new MySqlCdcTargetPosition(null, null);
+          }
+          return new MySqlCdcTargetPosition(file, position);
+        })) {
+      final List<MySqlCdcTargetPosition> masterStatus = stream.toList();
       final MySqlCdcTargetPosition targetPosition = masterStatus.get(0);
       LOGGER.info("Target File position : " + targetPosition);
-
       return targetPosition;
     } catch (final SQLException e) {
       throw new RuntimeException(e);
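The `equals` rewrite above uses Java 16 pattern matching for `instanceof`, which folds the type test and the cast into one step. An equivalent pair, using a hypothetical `BinlogPosition` record in place of `MySqlCdcTargetPosition`:

```java
// BinlogPosition is a stand-in for MySqlCdcTargetPosition, invented for illustration.
record BinlogPosition(String fileName, Integer position) {

  // Before: explicit test, then cast.
  boolean sameAsClassic(final Object obj) {
    if (obj instanceof BinlogPosition) {
      final BinlogPosition other = (BinlogPosition) obj;
      return fileName.equals(other.fileName) && other.position.equals(position);
    }
    return false;
  }

  // After: the pattern variable `other` is bound only when the test succeeds.
  boolean sameAsPatternMatched(final Object obj) {
    if (obj instanceof final BinlogPosition other) {
      return fileName.equals(other.fileName) && other.position.equals(position);
    }
    return false;
  }
}
```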
diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/helpers/CdcConfigurationHelper.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/helpers/CdcConfigurationHelper.java
index b2df3e91225919..f4c55d6257d52b 100644
--- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/helpers/CdcConfigurationHelper.java
+++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/helpers/CdcConfigurationHelper.java
@@ -4,8 +4,6 @@
 
 package io.airbyte.integrations.source.mysql.helpers;
 
-import static java.util.stream.Collectors.toList;
-
 import com.fasterxml.jackson.databind.JsonNode;
 import io.airbyte.commons.functional.CheckedConsumer;
 import io.airbyte.commons.json.Jsons;
@@ -15,7 +13,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
-import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -43,14 +41,14 @@ public class CdcConfigurationHelper {
    * @param offset - saved cdc offset with required binlog file
    * @param database - database
    */
-  public static void checkBinlog(JsonNode offset, JdbcDatabase database) {
-    Optional<String> binlogOptional = getBinlog(offset);
+  public static void checkBinlog(final JsonNode offset, final JdbcDatabase database) {
+    final Optional<String> binlogOptional = getBinlog(offset);
     binlogOptional.ifPresent(binlog -> {
       if (isBinlogAvailable(binlog, database)) {
         LOGGER.info("""
             Binlog %s is available""".formatted(binlog));
       } else {
-        String error =
+        final String error =
             """
                 Binlog %s is not available. This is a critical error, it means that requested binlog is not present on mysql server. To fix data synchronization you need to reset your data. Please check binlog retention policy configurations."""
                 .formatted(binlog);
@@ -73,46 +71,44 @@ public static List<CheckedConsumer<JdbcDatabase, Exception>> getCheckOperations(
   }
 
-  private static boolean isBinlogAvailable(String binlog, JdbcDatabase database) {
-    try {
-      List<String> binlogs = database.unsafeResultSetQuery(connection -> connection.createStatement().executeQuery("SHOW BINARY LOGS"),
-          resultSet -> resultSet.getString("Log_name")).collect(Collectors.toList());
+  private static boolean isBinlogAvailable(final String binlog, final JdbcDatabase database) {
+    if (binlog.isEmpty()) {
+      return false;
+    }
 
-      return !binlog.isEmpty() && binlogs.stream().anyMatch(e -> e.equals(binlog));
-    } catch (SQLException e) {
+    try (final Stream<String> binlogs = database.unsafeResultSetQuery(
+        connection -> connection.createStatement().executeQuery("SHOW BINARY LOGS"),
+        resultSet -> resultSet.getString("Log_name"))) {
+      return binlogs.anyMatch(e -> e.equals(binlog));
+    } catch (final SQLException e) {
       LOGGER.error("Can not get binlog list. Error: ", e);
       throw new RuntimeException(e);
     }
   }
 
-  private static Optional<String> getBinlog(JsonNode offset) {
-    JsonNode node = offset.get(CDC_OFFSET);
-    Iterator<Map.Entry<String, JsonNode>> fields = node.fields();
+  private static Optional<String> getBinlog(final JsonNode offset) {
+    final JsonNode node = offset.get(CDC_OFFSET);
+    final Iterator<Map.Entry<String, JsonNode>> fields = node.fields();
     while (fields.hasNext()) {
-      Map.Entry<String, JsonNode> jsonField = fields.next();
+      final Map.Entry<String, JsonNode> jsonField = fields.next();
       return Optional.ofNullable(Jsons.deserialize(jsonField.getValue().asText()).path("file").asText());
     }
     return Optional.empty();
   }
 
-  private static CheckedConsumer<JdbcDatabase, Exception> getCheckOperation(String name, String value) {
+  private static CheckedConsumer<JdbcDatabase, Exception> getCheckOperation(final String name, final String value) {
     return database -> {
-      final List<String> result = database.unsafeResultSetQuery(connection -> {
-        final String sql = """
-            show variables where Variable_name = '%s'""".formatted(name);
-
-        return connection.createStatement().executeQuery(sql);
-      }, resultSet -> resultSet.getString("Value")).collect(toList());
+      final List<String> result = database.queryStrings(
+          connection -> connection.createStatement().executeQuery(String.format("show variables where Variable_name = '%s'", name)),
+          resultSet -> resultSet.getString("Value"));
 
       if (result.size() != 1) {
-        throw new RuntimeException("""
-            Could not query the variable %s""".formatted(name));
+        throw new RuntimeException("Could not query the variable " + name);
       }
 
       final String resultValue = result.get(0);
       if (!resultValue.equalsIgnoreCase(value)) {
-        throw new RuntimeException("""
-            The variable %s should be set to %s, but it is : %s""".formatted(name, value, resultValue));
+        throw new RuntimeException(String.format("The variable \"%s\" should be set to \"%s\", but it is \"%s\"", name, value, resultValue));
      }
    };
  }
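As with the MSSQL checks, the binlog validation above corresponds to statements you can run directly against MySQL; the connector reads the `Log_name` and `Value` columns of their result sets:

```sql
-- isBinlogAvailable: list the binlog files the server still retains,
-- then look for the file named in the saved CDC offset (Log_name column).
SHOW BINARY LOGS;

-- getCheckOperation: read a server variable and compare it to the expected
-- value (Value column); 'binlog_format' is an example variable name.
show variables where Variable_name = 'binlog_format';
```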
diff --git a/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleSourceNneAcceptanceTest.java b/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleSourceNneAcceptanceTest.java
index f012936d4649e9..720226fffbcf96 100644
--- a/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleSourceNneAcceptanceTest.java
+++ b/airbyte-integrations/connectors/source-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/oracle_strict_encrypt/OracleSourceNneAcceptanceTest.java
@@ -42,9 +42,9 @@ public void testEncryption() throws SQLException {
             "oracle.net.encryption_types_client=( " + algorithm + " )"));
 
-    final String network_service_banner =
+    final String networkServiceBanner =
         "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)";
-    final List<JsonNode> collect = database.unsafeQuery(network_service_banner).toList();
+    final List<JsonNode> collect = database.queryJsons(networkServiceBanner);
 
     assertTrue(collect.get(2).get("NETWORK_SERVICE_BANNER").asText()
         .contains(algorithm + " Encryption"));
@@ -71,8 +71,8 @@ public void testCheckProtocol() throws SQLException {
         JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" +
             "oracle.net.encryption_types_client=( " + algorithm + " )", ";"));
 
-    final String network_service_banner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual";
-    final List<JsonNode> collect = database.unsafeQuery(network_service_banner).toList();
+    final String networkServiceBanner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual";
+    final List<JsonNode> collect = database.queryJsons(networkServiceBanner);
 
     assertEquals("tcp", collect.get(0).get("NETWORK_PROTOCOL").asText());
   }
diff --git a/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceNneAcceptanceTest.java b/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceNneAcceptanceTest.java
index e67716e4550444..aaabf381fcf1aa 100644
--- a/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceNneAcceptanceTest.java
+++ b/airbyte-integrations/connectors/source-oracle/src/test-integration/java/io/airbyte/integrations/source/oracle/OracleSourceNneAcceptanceTest.java
@@ -16,7 +16,6 @@
 import io.airbyte.db.jdbc.JdbcUtils;
 import java.sql.SQLException;
 import java.util.List;
-import java.util.stream.Collectors;
 import org.junit.jupiter.api.Test;
 
 public class OracleSourceNneAcceptanceTest extends OracleSourceAcceptanceTest {
@@ -43,12 +42,11 @@ public void testEncrytion() throws SQLException {
             "oracle.net.encryption_types_client=( " + algorithm + " )"));
 
-    final String network_service_banner =
+    final String networkServiceBanner =
         "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)";
-    final List<JsonNode> collect = database.unsafeQuery(network_service_banner).collect(Collectors.toList());
+    final List<JsonNode> collect = database.queryJsons(networkServiceBanner);
 
-    assertTrue(collect.get(2).get("NETWORK_SERVICE_BANNER").asText()
-        .contains(algorithm + " Encryption"));
+    assertTrue(collect.get(2).get("NETWORK_SERVICE_BANNER").asText().contains(algorithm + " Encryption"));
   }
 
   @Test
@@ -62,12 +60,11 @@ public void testNoneEncrytion() throws SQLException {
         config.get("sid").asText()),
         "oracle.jdbc.driver.OracleDriver");
 
-    final String network_service_banner =
+    final String networkServiceBanner =
         "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)";
-    final List<JsonNode> collect = database.unsafeQuery(network_service_banner).collect(Collectors.toList());
+    final List<JsonNode> collect = database.queryJsons(networkServiceBanner);
 
-    assertTrue(collect.get(1).get("NETWORK_SERVICE_BANNER").asText()
-        .contains("Encryption service"));
+    assertTrue(collect.get(1).get("NETWORK_SERVICE_BANNER").asText().contains("Encryption service"));
   }
 
   @Test
@@ -92,8 +89,8 @@ public void testCheckProtocol() throws SQLException {
             "oracle.net.encryption_types_client=( " + algorithm + " )"));
 
-    final String network_service_banner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual";
-    final List<JsonNode> collect = database.unsafeQuery(network_service_banner).collect(Collectors.toList());
+    final String networkServiceBanner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual";
+    final List<JsonNode> collect = database.queryJsons(networkServiceBanner);
 
     assertEquals("tcp", collect.get(0).get("NETWORK_PROTOCOL").asText());
   }
diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java
index bbc397b0cd2ba6..8ea0f7dacd7d88 100644
--- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java
+++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java
@@ -14,6 +14,7 @@
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableMap;
 import io.airbyte.commons.functional.CheckedConsumer;
+import io.airbyte.commons.functional.CheckedFunction;
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.commons.util.AutoCloseableIterator;
 import io.airbyte.db.jdbc.JdbcDatabase;
@@ -33,6 +34,7 @@
 import io.airbyte.protocol.models.CommonField;
 import io.airbyte.protocol.models.ConfiguredAirbyteCatalog;
 import io.airbyte.protocol.models.SyncMode;
+import java.sql.Connection;
 import java.sql.JDBCType;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
@@ -155,7 +157,7 @@ public List<CheckedConsumer<JdbcDatabase, Exception>> getCheckOperations(final J
 
     if (isCdc(config)) {
       checkOperations.add(database -> {
-        final List<JsonNode> matchingSlots = database.unsafeQuery(connection -> {
+        final List<JsonNode> matchingSlots = database.queryJsons(connection -> {
           final String sql = "SELECT * FROM pg_replication_slots WHERE slot_name = ? AND plugin = ? AND database = ?";
           final PreparedStatement ps = connection.prepareStatement(sql);
           ps.setString(1, config.get("replication_method").get("replication_slot").asText());
@@ -166,7 +168,7 @@ public List<CheckedConsumer<JdbcDatabase, Exception>> getCheckOperations(final J
               "Attempting to find the named replication slot using the query: " + ps.toString());
 
           return ps;
-        }, sourceOperations::rowToJson).collect(toList());
+        }, sourceOperations::rowToJson);
 
         if (matchingSlots.size() != 1) {
           throw new RuntimeException(
@@ -177,15 +179,12 @@ public List<CheckedConsumer<JdbcDatabase, Exception>> getCheckOperations(final J
       });
 
       checkOperations.add(database -> {
-        final List<JsonNode> matchingPublications = database.unsafeQuery(connection -> {
-          final PreparedStatement ps = connection
-              .prepareStatement("SELECT * FROM pg_publication WHERE pubname = ?");
+        final List<JsonNode> matchingPublications = database.queryJsons(connection -> {
+          final PreparedStatement ps = connection.prepareStatement("SELECT * FROM pg_publication WHERE pubname = ?");
           ps.setString(1, config.get("replication_method").get("publication").asText());
-
-          LOGGER.info("Attempting to find the publication using the query: " + ps.toString());
-
+          LOGGER.info("Attempting to find the publication using the query: " + ps);
           return ps;
-        }, sourceOperations::rowToJson).collect(toList());
+        }, sourceOperations::rowToJson);
 
         if (matchingPublications.size() != 1) {
           throw new RuntimeException(
@@ -274,7 +273,7 @@ private static AirbyteStream removeIncrementalWithoutPk(final AirbyteStream stre
   public Set<JdbcPrivilegeDto> getPrivilegesTableForCurrentUser(final JdbcDatabase database, final String schema)
       throws SQLException {
-    return database.unsafeQuery(connection -> {
+    final CheckedFunction<Connection, PreparedStatement, SQLException> statementCreator = connection -> {
       final PreparedStatement ps = connection.prepareStatement(
           """
                  SELECT DISTINCT table_catalog,
@@ -316,8 +315,9 @@ public Set<JdbcPrivilegeDto> getPrivilegesTableForCurrentUser(final JdbcDatabase
       ps.setString(2, username);
       ps.setString(3, username);
       return ps;
-    }, sourceOperations::rowToJson)
-        .collect(toSet())
+    };
+
+    return database.queryJsons(statementCreator, sourceOperations::rowToJson)
         .stream()
         .map(e -> JdbcPrivilegeDto.builder()
             .schemaName(e.get("table_schema").asText())
From 075bec3882337afba193913ef926226c1e165b56 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 3 May 2022 
15:50:38 -0500 Subject: [PATCH 068/152] Bump minimist from 1.2.5 to 1.2.6 in /docusaurus (#11766) Bumps [minimist](https://github.com/substack/minimist) from 1.2.5 to 1.2.6. - [Release notes](https://github.com/substack/minimist/releases) - [Commits](https://github.com/substack/minimist/compare/1.2.5...1.2.6) --- updated-dependencies: - dependency-name: minimist dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docusaurus/package-lock.json | 418 +---------------------------------- docusaurus/yarn.lock | 6 +- 2 files changed, 9 insertions(+), 415 deletions(-) diff --git a/docusaurus/package-lock.json b/docusaurus/package-lock.json index 8f19cf4feeb2b9..2edf41de416478 100644 --- a/docusaurus/package-lock.json +++ b/docusaurus/package-lock.json @@ -1139,81 +1139,6 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-syntax-jsx/node_modules/@babel/core": { - "version": "7.12.9", - "license": "MIT", - "peer": true, - "dependencies": { - "@babel/code-frame": "^7.10.4", - "@babel/generator": "^7.12.5", - "@babel/helper-module-transforms": "^7.12.1", - "@babel/helpers": "^7.12.5", - "@babel/parser": "^7.12.7", - "@babel/template": "^7.12.7", - "@babel/traverse": "^7.12.9", - "@babel/types": "^7.12.7", - "convert-source-map": "^1.7.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.1", - "json5": "^2.1.2", - "lodash": "^4.17.19", - "resolve": "^1.3.2", - "semver": "^5.4.1", - "source-map": "^0.5.0" - }, - "engines": { - "node": ">=6.9.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/babel" - } - }, - "node_modules/@babel/plugin-syntax-jsx/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "license": "MIT", - "peer": true, - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/@babel/plugin-syntax-jsx/node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "license": "MIT", - "peer": true - }, - "node_modules/@babel/plugin-syntax-jsx/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "license": "ISC", - "peer": true, - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/@babel/plugin-syntax-jsx/node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "license": "BSD-3-Clause", - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/@babel/plugin-syntax-logical-assignment-operators": { "version": "7.10.4", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", @@ -5547,169 +5472,6 @@ } } }, - "node_modules/css-minimizer-webpack-plugin/node_modules/css-declaration-sorter": { - "version": "6.1.4", - "resolved": 
"https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-6.1.4.tgz", - "integrity": "sha512-lpfkqS0fctcmZotJGhnxkIyJWvBXgpyi2wsFd4J8VB7wzyrT6Ch/3Q+FMNJpjK4gu1+GN5khOnpU2ZVKrLbhCw==", - "license": "ISC", - "dependencies": { - "timsort": "^0.3.0" - }, - "engines": { - "node": ">= 10" - }, - "peerDependencies": { - "postcss": "^8.0.9" - } - }, - "node_modules/css-minimizer-webpack-plugin/node_modules/cssnano": { - "version": "5.1.4", - "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-5.1.4.tgz", - "integrity": "sha512-hbfhVZreEPyzl+NbvRsjNo54JOX80b+j6nqG2biLVLaZHJEiqGyMh4xDGHtwhUKd5p59mj2GlDqlUBwJUuIu5A==", - "license": "MIT", - "dependencies": { - "cssnano-preset-default": "^*", - "lilconfig": "^2.0.3", - "yaml": "^1.10.2" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/cssnano" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/css-minimizer-webpack-plugin/node_modules/cssnano-preset-default": { - "version": "5.2.4", - "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-5.2.4.tgz", - "integrity": "sha512-w1Gg8xsebln6/axZ6qDFQHuglrGfbIHOIx0g4y9+etRlRab8CGpSpe6UMsrgJe4zhCaJ0LwLmc+PhdLRTwnhIA==", - "license": "MIT", - "dependencies": { - "css-declaration-sorter": "^6.0.3", - "cssnano-utils": "^*", - "postcss-calc": "^8.2.3", - "postcss-colormin": "^*", - "postcss-convert-values": "^*", - "postcss-discard-comments": "^*", - "postcss-discard-duplicates": "^*", - "postcss-discard-empty": "^*", - "postcss-discard-overridden": "^*", - "postcss-merge-longhand": "^*", - "postcss-merge-rules": "^*", - "postcss-minify-font-values": "^*", - "postcss-minify-gradients": "^*", - "postcss-minify-params": "^*", - "postcss-minify-selectors": "^*", - "postcss-normalize-charset": "^*", - "postcss-normalize-display-values": "^*", - "postcss-normalize-positions": "^*", - "postcss-normalize-repeat-style": "^*", - "postcss-normalize-string": "^*", - "postcss-normalize-timing-functions": "^*", - "postcss-normalize-unicode": "^*", - "postcss-normalize-url": "^*", - "postcss-normalize-whitespace": "^*", - "postcss-ordered-values": "^*", - "postcss-reduce-initial": "^*", - "postcss-reduce-transforms": "^*", - "postcss-svgo": "^*", - "postcss-unique-selectors": "^*" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/css-minimizer-webpack-plugin/node_modules/postcss-merge-longhand": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-5.1.2.tgz", - "integrity": "sha512-18/bp9DZnY1ai9RlahOfLBbmIUKfKFPASxRCiZ1vlpZqWPCn8qWPFlEozqmWL+kBtcEQmG8W9YqGCstDImvp/Q==", - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0", - "stylehacks": "^*" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/css-minimizer-webpack-plugin/node_modules/postcss-merge-rules": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-5.1.0.tgz", - "integrity": "sha512-NecukEJovQ0mG7h7xV8wbYAkXGTO3MPKnXvuiXzOKcxoOodfTTKYjeo8TMhAswlSkjcPIBlnKbSFcTuVSDaPyQ==", - "license": "MIT", - "dependencies": { - "browserslist": "^4.16.6", - "caniuse-api": "^3.0.0", - "cssnano-utils": "^3.1.0", - "postcss-selector-parser": "^6.0.5" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - 
"peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/css-minimizer-webpack-plugin/node_modules/postcss-minify-gradients": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-5.1.0.tgz", - "integrity": "sha512-J/TMLklkONn3LuL8wCwfwU8zKC1hpS6VcxFkNUNjmVt53uKqrrykR3ov11mdUYyqVMEx67slMce0tE14cE4DTg==", - "license": "MIT", - "dependencies": { - "colord": "^2.9.1", - "cssnano-utils": "^3.1.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/css-minimizer-webpack-plugin/node_modules/postcss-minify-params": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-5.1.1.tgz", - "integrity": "sha512-WCpr+J9Uz8XzMpAfg3UL8z5rde6MifBbh5L8bn8S2F5hq/YDJJzASYCnCHvAB4Fqb94ys8v95ULQkW2EhCFvNg==", - "license": "MIT", - "dependencies": { - "browserslist": "^4.16.6", - "cssnano-utils": "^3.1.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/css-minimizer-webpack-plugin/node_modules/postcss-ordered-values": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-5.1.0.tgz", - "integrity": "sha512-wU4Z4D4uOIH+BUKkYid36gGDJNQtkVJT7Twv8qH6UyfttbbJWyw4/xIPuVEkkCtQLAJ0EdsNSh8dlvqkXb49TA==", - "license": "MIT", - "dependencies": { - "cssnano-utils": "^3.1.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, "node_modules/css-select": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/css-select/-/css-select-4.2.1.tgz", @@ -9003,10 +8765,9 @@ } }, "node_modules/minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", - "license": "MIT" + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" }, "node_modules/mkdirp": { "version": "0.5.5", @@ -12331,12 +12092,6 @@ "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==", "license": "MIT" }, - "node_modules/timsort": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/timsort/-/timsort-0.3.0.tgz", - "integrity": "sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q=", - "license": "MIT" - }, "node_modules/tiny-invariant": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.2.0.tgz", @@ -14405,57 +14160,6 @@ "integrity": "sha512-1yRi7yAtB0ETgxdY9ti/p2TivUxJkTdhu/ZbF9MshVGqOx1TdB3b7xCXs49Fupgg50N45KcAsRP/ZqWjs9SRjg==", "requires": { "@babel/helper-plugin-utils": "^7.10.4" - }, - "dependencies": { - "@babel/core": { - "version": "7.12.9", - "peer": true, - "requires": { - "@babel/code-frame": "^7.10.4", - "@babel/generator": "^7.12.5", - "@babel/helper-module-transforms": "^7.12.1", - "@babel/helpers": "^7.12.5", - "@babel/parser": "^7.12.7", - "@babel/template": "^7.12.7", - "@babel/traverse": "^7.12.9", - "@babel/types": "^7.12.7", - "convert-source-map": "^1.7.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.1", - "json5": "^2.1.2", 
- "lodash": "^4.17.19", - "resolve": "^1.3.2", - "semver": "^5.4.1", - "source-map": "^0.5.0" - } - }, - "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "peer": true, - "requires": { - "ms": "2.1.2" - } - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "peer": true - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "peer": true - }, - "source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "peer": true - } } }, "@babel/plugin-syntax-logical-assignment-operators": { @@ -17406,111 +17110,6 @@ "schema-utils": "^4.0.0", "serialize-javascript": "^6.0.0", "source-map": "^0.6.1" - }, - "dependencies": { - "css-declaration-sorter": { - "version": "6.1.4", - "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-6.1.4.tgz", - "integrity": "sha512-lpfkqS0fctcmZotJGhnxkIyJWvBXgpyi2wsFd4J8VB7wzyrT6Ch/3Q+FMNJpjK4gu1+GN5khOnpU2ZVKrLbhCw==", - "requires": { - "timsort": "^0.3.0" - } - }, - "cssnano": { - "version": "5.1.4", - "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-5.1.4.tgz", - "integrity": "sha512-hbfhVZreEPyzl+NbvRsjNo54JOX80b+j6nqG2biLVLaZHJEiqGyMh4xDGHtwhUKd5p59mj2GlDqlUBwJUuIu5A==", - "requires": { - "cssnano-preset-default": "^*", - "lilconfig": "^2.0.3", - "yaml": "^1.10.2" - } - }, - "cssnano-preset-default": { - "version": "5.2.4", - "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-5.2.4.tgz", - "integrity": "sha512-w1Gg8xsebln6/axZ6qDFQHuglrGfbIHOIx0g4y9+etRlRab8CGpSpe6UMsrgJe4zhCaJ0LwLmc+PhdLRTwnhIA==", - "requires": { - "css-declaration-sorter": "^6.0.3", - "cssnano-utils": "^*", - "postcss-calc": "^8.2.3", - "postcss-colormin": "^*", - "postcss-convert-values": "^*", - "postcss-discard-comments": "^*", - "postcss-discard-duplicates": "^*", - "postcss-discard-empty": "^*", - "postcss-discard-overridden": "^*", - "postcss-merge-longhand": "^*", - "postcss-merge-rules": "^*", - "postcss-minify-font-values": "^*", - "postcss-minify-gradients": "^*", - "postcss-minify-params": "^*", - "postcss-minify-selectors": "^*", - "postcss-normalize-charset": "^*", - "postcss-normalize-display-values": "^*", - "postcss-normalize-positions": "^*", - "postcss-normalize-repeat-style": "^*", - "postcss-normalize-string": "^*", - "postcss-normalize-timing-functions": "^*", - "postcss-normalize-unicode": "^*", - "postcss-normalize-url": "^*", - "postcss-normalize-whitespace": "^*", - "postcss-ordered-values": "^*", - "postcss-reduce-initial": "^*", - "postcss-reduce-transforms": "^*", - "postcss-svgo": "^*", - "postcss-unique-selectors": "^*" - } - }, - "postcss-merge-longhand": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-5.1.2.tgz", - "integrity": "sha512-18/bp9DZnY1ai9RlahOfLBbmIUKfKFPASxRCiZ1vlpZqWPCn8qWPFlEozqmWL+kBtcEQmG8W9YqGCstDImvp/Q==", - "requires": { - "postcss-value-parser": "^4.2.0", - "stylehacks": "^*" - } - }, - 
"postcss-merge-rules": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-5.1.0.tgz", - "integrity": "sha512-NecukEJovQ0mG7h7xV8wbYAkXGTO3MPKnXvuiXzOKcxoOodfTTKYjeo8TMhAswlSkjcPIBlnKbSFcTuVSDaPyQ==", - "requires": { - "browserslist": "^4.16.6", - "caniuse-api": "^3.0.0", - "cssnano-utils": "^3.1.0", - "postcss-selector-parser": "^6.0.5" - } - }, - "postcss-minify-gradients": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-5.1.0.tgz", - "integrity": "sha512-J/TMLklkONn3LuL8wCwfwU8zKC1hpS6VcxFkNUNjmVt53uKqrrykR3ov11mdUYyqVMEx67slMce0tE14cE4DTg==", - "requires": { - "colord": "^2.9.1", - "cssnano-utils": "^3.1.0", - "postcss-value-parser": "^4.2.0" - } - }, - "postcss-minify-params": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-5.1.1.tgz", - "integrity": "sha512-WCpr+J9Uz8XzMpAfg3UL8z5rde6MifBbh5L8bn8S2F5hq/YDJJzASYCnCHvAB4Fqb94ys8v95ULQkW2EhCFvNg==", - "requires": { - "browserslist": "^4.16.6", - "cssnano-utils": "^3.1.0", - "postcss-value-parser": "^4.2.0" - } - }, - "postcss-ordered-values": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-5.1.0.tgz", - "integrity": "sha512-wU4Z4D4uOIH+BUKkYid36gGDJNQtkVJT7Twv8qH6UyfttbbJWyw4/xIPuVEkkCtQLAJ0EdsNSh8dlvqkXb49TA==", - "requires": { - "cssnano-utils": "^3.1.0", - "postcss-value-parser": "^4.2.0" - } - } } }, "css-select": { @@ -19705,9 +19304,9 @@ } }, "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" }, "mkdirp": { "version": "0.5.5", @@ -21898,11 +21497,6 @@ "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==" }, - "timsort": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/timsort/-/timsort-0.3.0.tgz", - "integrity": "sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q=" - }, "tiny-invariant": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.2.0.tgz", diff --git a/docusaurus/yarn.lock b/docusaurus/yarn.lock index f3cd123ebb9ebf..4fb157063e0532 100644 --- a/docusaurus/yarn.lock +++ b/docusaurus/yarn.lock @@ -5317,9 +5317,9 @@ minimatch@^3.0.4: brace-expansion "^1.1.7" minimist@^1.2.0, minimist@^1.2.5: - version "1.2.5" - resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz" - integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== + version "1.2.6" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" + integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== mkdirp@^0.5.5: version "0.5.5" From 21c13644ad8a0ba9b2931651c4c7eaecfbce8b7a Mon Sep 17 00:00:00 2001 From: Anne <102554163+alovew@users.noreply.github.com> Date: Tue, 3 May 2022 14:11:50 -0700 Subject: [PATCH 069/152] Add AirbyteTraceMessage to Airbyte protocol (#12458) * Add AirbyteTraceMessage to protocol 
Co-authored-by: Lake Mossman --- .../airbyte_protocol/airbyte_protocol.yaml | 43 +++++++++++++++++++ .../airbyte-specification.md | 5 ++- 2 files changed, 46 insertions(+), 2 deletions(-) diff --git a/airbyte-protocol/models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml b/airbyte-protocol/models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml index cea7e57cfc8b66..1eabc18c01ebea 100644 --- a/airbyte-protocol/models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml +++ b/airbyte-protocol/models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml @@ -26,6 +26,7 @@ definitions: - SPEC - CONNECTION_STATUS - CATALOG + - TRACE log: description: "log message: any kind of logging you want the platform to know about." "$ref": "#/definitions/AirbyteLogMessage" @@ -43,6 +44,9 @@ definitions: state: description: "schema message: the state. Must be the last message produced. The platform uses this information" "$ref": "#/definitions/AirbyteStateMessage" + trace: + description: "trace message: a message to communicate information about the status and performance of a connector" + "$ref": "#/definitions/AirbyteTraceMessage" AirbyteRecordMessage: type: object additionalProperties: true @@ -94,6 +98,45 @@ definitions: message: description: "the log message" type: string + AirbyteTraceMessage: + type: object + additionalProperties: true + required: + - type + - emitted_at + properties: + type: + description: "the type of trace message" + type: string + enum: + - ERROR + emitted_at: + description: "the time in ms that the message was emitted" + type: number + error: + description: "error trace message: the error object" + "$ref": "#/definitions/AirbyteErrorTraceMessage" + AirbyteErrorTraceMessage: + type: object + additionalProperties: true + required: + - message + properties: + message: + description: A user-friendly message that indicates the cause of the error + type: string + internal_message: + description: The internal error that caused the failure + type: string + stack_trace: + description: The full stack trace of the error + type: string + failure_type: + description: The type of error + type: string + enum: + - system_error + - config_error AirbyteConnectionStatus: description: Airbyte connection status type: object diff --git a/docs/understanding-airbyte/airbyte-specification.md b/docs/understanding-airbyte/airbyte-specification.md index 351613b03ef14f..a06f66520411e5 100644 --- a/docs/understanding-airbyte/airbyte-specification.md +++ b/docs/understanding-airbyte/airbyte-specification.md @@ -214,8 +214,9 @@ For the sake of brevity, we will not re-describe `spec` and `check`. They are ex ## The Airbyte Protocol * All messages passed to and from connectors must be wrapped in an `AirbyteMessage` envelope and serialized as JSON. The JsonSchema specification for these messages can be found [here](https://github.com/airbytehq/airbyte/blob/922bfd08a9182443599b78dbb273d70cb9f63d30/airbyte-protocol/models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml#L13-L45). -* Even if a record is wrapped in an `AirbyteMessage` it will only be processed if it appropriate for the given command. e.g. If a source `read` action includes AirbyteMessages in its stream of type Catalog for instance, these messages will be ignored as the `read` interface only expects `AirbyteRecordMessage`s and `AirbyteStateMessage`s. The appropriate `AirbyteMessage` types have been described in each command above. -* **ALL** actions are allowed to return `AirbyteLogMessage`s on stdout. 
For brevity, we have not mentioned these log messages in the description of each action, but they are always allowed. An `AirbyteLogMessage` wraps any useful logging that the connector wants to provide. These logs will be written to Airbyte's log files and output to the console.
+* Even if a record is wrapped in an `AirbyteMessage` it will only be processed if it is appropriate for the given command. e.g. If a source `read` action includes AirbyteMessages in its stream of type Catalog for instance, these messages will be ignored as the `read` interface only expects `AirbyteRecordMessage`s and `AirbyteStateMessage`s. The appropriate `AirbyteMessage` types have been described in each command above.
+* **ALL** actions are allowed to return `AirbyteLogMessage`s and `AirbyteTraceMessage`s on stdout. For brevity, we have not mentioned these messages in the description of each action, but they are always allowed. An `AirbyteLogMessage` wraps any useful logging that the connector wants to provide. These logs will be written to Airbyte's log files and output to the console. An `AirbyteTraceMessage` provides structured information about the performance and status of a connector, such as the failure reason in the event of an error.
+
 * I/O:
   * Connectors receive arguments on the command line via JSON files. `e.g. --catalog catalog.json`
   * They read `AirbyteMessage`s from stdin. The destination `write` action is the only command that consumes `AirbyteMessage`s.
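Concretely, a connector hitting a bad configuration would emit one line of JSON on stdout shaped like the following (a hand-written illustration of the schema added above, not captured connector output):

```json
{
  "type": "TRACE",
  "trace": {
    "type": "ERROR",
    "emitted_at": 1651610400000,
    "error": {
      "message": "Could not connect to the database with the provided credentials.",
      "internal_message": "java.sql.SQLException: Access denied for user 'airbyte'",
      "stack_trace": "java.sql.SQLException: Access denied for user 'airbyte' ...",
      "failure_type": "config_error"
    }
  }
}
```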
From 8cfad1b4537f9936bd2c3737289c1d446a174025 Mon Sep 17 00:00:00 2001
From: Teal Larson 
Date: Tue, 3 May 2022 17:23:08 -0400
Subject: [PATCH 070/152] Don't allow users to edit deleted connections
 (#12254)

* add 'deleted' mode to ConnectionForm

Co-authored-by: Edmundo Ruiz Ghanem

* refactor 'out of credits banner' to generic error banner
* Disable fields and remove buttons
* use mode instead of checking connection, cleanup
* close... todo finish replication page (search, refresh button)
* cleanup
* error banner is really an alert banner
* make ConnectionFormMode its own type
* Update airbyte-webapp/src/components/MainPageWithScroll/MainPageWithScroll.tsx

Co-authored-by: Edmundo Ruiz Ghanem <168664+edmundito@users.noreply.github.com>

* Update airbyte-webapp/src/views/Connection/ConnectionForm/components/SyncCatalogField.tsx

Co-authored-by: Edmundo Ruiz Ghanem <168664+edmundito@users.noreply.github.com>

* cleanup, types
* Update airbyte-webapp/src/locales/en.json

Co-authored-by: Andy Jih

* fix transformation edit mode, fix circleLoader css error
* fix fieldset pointerEvents
* Update airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/ConnectionPageTitle.tsx

Co-authored-by: Krishna Glick

* Update airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/ConnectionItemPage.tsx

Co-authored-by: Krishna Glick

* Update airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/ConnectionPageTitle.tsx

Co-authored-by: Krishna Glick

* Update airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/StatusView.tsx

Co-authored-by: Krishna Glick

* Update airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/StatusView.tsx

Co-authored-by: Krishna Glick

* Update airbyte-webapp/src/views/Connection/FormCard.tsx

Co-authored-by: Krishna Glick

* Update airbyte-webapp/src/views/Connection/ConnectionForm/ConnectionForm.tsx

Co-authored-by: Krishna Glick

* Update airbyte-webapp/src/views/Connection/ConnectionForm/ConnectionForm.tsx

Co-authored-by: Krishna Glick

* Update airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/TransformationView.tsx

Co-authored-by: Krishna Glick

* Update airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/TransformationView.tsx

Co-authored-by: Krishna Glick

* string union types to use exported type
* formik does not like unknown
* fix modes, types on transformation forms

Co-authored-by: Krishna Glick

* inline export
* clean up types/exports for ArrayOfObjectsEditor
* EditorHeader tests
* connection form tests
* types cleanup
* import fix
* fix another bad import
* Don't show checkboxes on replication view page for deleted
* use `NavigateReplace` on settings route when deleted
* fix test
* wording tweak from code review
* clarity in EditorHeader test from code review
* fix deleted view of connection form fields

Co-authored-by: Edmundo Ruiz Ghanem
Co-authored-by: Edmundo Ruiz Ghanem <168664+edmundito@users.noreply.github.com>
Co-authored-by: Andy Jih
Co-authored-by: Krishna Glick
Co-authored-by: Krishna Glick
---
 .../ArrayOfObjectsEditor.tsx                  | 34 ++++---
 .../components/EditorHeader.test.tsx          | 46 +++++++++
 .../components/EditorHeader.tsx               | 13 ++-
 .../CreateConnectionContent.tsx               |  1 +
 .../LabeledRadioButton/LabeledRadioButton.tsx |  2 +-
 .../MainPageWithScroll/MainPageWithScroll.tsx | 14 ++-
 .../components/base/Banner/AlertBanner.tsx    | 41 ++++++++
 .../base/RadioButton/RadioButton.tsx          |  4 +-
 .../src/core/domain/connection/types.ts       |  2 +-
 airbyte-webapp/src/locales/en.json            |  2 +
 .../cloud/views/layout/MainView/MainView.tsx  |  6 +-
 .../components/CreditsProblemBanner.tsx       | 38 -------
 .../ConnectionItemPage/ConnectionItemPage.tsx | 25 ++++-
 .../components/ConnectionPageTitle.tsx        | 16 +--
 .../components/ReplicationView.tsx            | 10 +-
 .../components/StatusMainInfo.tsx             | 20 ++--
 .../components/StatusView.tsx                 | 52 +++++-----
 .../components/TransformationView.tsx         | 61 ++++++++----
 airbyte-webapp/src/utils/testutils.tsx        | 15 ++-
 .../Connection/CatalogTree/CatalogSection.tsx |  4 +
 .../Connection/CatalogTree/CatalogTree.tsx    |  9 +-
 .../Connection/CatalogTree/StreamHeader.tsx   | 21 +++-
 .../src/views/Connection/CollapsibleCard.tsx  |  4 +-
 .../ConnectionForm/ConnectionForm.test.tsx    | 99 +++++++++++++++++++
 .../ConnectionForm/ConnectionForm.tsx         | 37 ++++---
 .../components/NormalizationField.tsx         | 33 ++++---
 .../components/SyncCatalogField.tsx           | 58 ++++++-----
 .../components/TransformationField.tsx        |  6 +-
 .../src/views/Connection/FormCard.tsx         | 60 ++++++-----
 29 files changed, 511 insertions(+), 222 deletions(-)
 create mode 100644 airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.test.tsx
 create mode 100644 airbyte-webapp/src/components/base/Banner/AlertBanner.tsx
 delete mode 100644 airbyte-webapp/src/packages/cloud/views/layout/MainView/components/CreditsProblemBanner.tsx
 create mode 100644 airbyte-webapp/src/views/Connection/ConnectionForm/ConnectionForm.test.tsx

diff --git a/airbyte-webapp/src/components/ArrayOfObjectsEditor/ArrayOfObjectsEditor.tsx b/airbyte-webapp/src/components/ArrayOfObjectsEditor/ArrayOfObjectsEditor.tsx
index 85370ebbef1a3a..7cb4b0c2658646 100644
--- a/airbyte-webapp/src/components/ArrayOfObjectsEditor/ArrayOfObjectsEditor.tsx
+++ b/airbyte-webapp/src/components/ArrayOfObjectsEditor/ArrayOfObjectsEditor.tsx
@@ -4,6 +4,8 @@ import styled from "styled-components";
 
 import { Button } from "components";
 
+import { ConnectionFormMode } from "views/Connection/ConnectionForm/ConnectionForm";
+
 import { EditorHeader } from "./components/EditorHeader";
 import { EditorRow } from "./components/EditorRow";
 
@@ -26,7 +28,7 @@ const Content = styled.div`
   margin-bottom: 20px;
 `;
 
-type ArrayOfObjectsEditorProps<T> = {
+export interface ArrayOfObjectsEditorProps<T> {
   items: T[];
   editableItemIndex?: number | string | null;
   children: (item?: T) => React.ReactNode;
@@ -36,20 +38,26 @@ type ArrayOfObjectsEditorProps<T> = {
   onCancelEdit?: () => void;
   onDone?: () => void;
   onRemove: (index: number) => void;
-};
+  mode?: ConnectionFormMode;
+}
 
-function ArrayOfObjectsEditor<T>(
-  props: ArrayOfObjectsEditorProps<T>
-): JSX.Element {
-  const { onStartEdit, onDone, onRemove, onCancelEdit, items, editableItemIndex, children, mainTitle, addButtonText } =
-    props;
+export function ArrayOfObjectsEditor<T>({
+  onStartEdit,
+  onDone,
+  onRemove,
+  onCancelEdit,
+  items,
+  editableItemIndex,
+  children,
+  mainTitle,
+  addButtonText,
+  mode,
+}: ArrayOfObjectsEditorProps<T>): JSX.Element {
   const onAddItem = React.useCallback(() => onStartEdit(items.length), [onStartEdit, items]);
-  const isEditMode = editableItemIndex !== null && editableItemIndex !== undefined;
-
-  if (isEditMode) {
+  const isEditable = editableItemIndex !== null && editableItemIndex !== undefined;
+  if (mode !== "readonly" && isEditable) {
     const item = typeof editableItemIndex === "number" ? items[editableItemIndex] : undefined;
-
     return (
       <Content>
         {children(item)}
@@ -78,6 +86,7 @@ function ArrayOfObjectsEditor(
       <EditorHeader
         onAddItem={onAddItem}
         mainTitle={mainTitle}
         addButtonText={addButtonText}
+        mode={mode}
       />
       {items.length ? (
@@ -89,6 +98,3 @@
     </Content>
   );
 }
-
-export { ArrayOfObjectsEditor };
-export type { ArrayOfObjectsEditorProps };
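The new optional `mode` prop is what lets this one editor back both live and deleted connections. A hypothetical caller (invented for illustration; only `ArrayOfObjectsEditor`, `ConnectionFormMode`, and the `mode` prop come from the diff above, and the import paths are assumptions) would thread it through like so:

```tsx
// Hypothetical usage sketch, not part of this patch.
import React from "react";

import { ArrayOfObjectsEditor } from "components/ArrayOfObjectsEditor";
import { ConnectionFormMode } from "views/Connection/ConnectionForm/ConnectionForm";

interface Transformation {
  name: string;
}

const TransformationsList: React.FC<{ items: Transformation[]; mode: ConnectionFormMode }> = ({ items, mode }) => (
  // mode="readonly" hides the add button and suppresses the inline edit view.
  <ArrayOfObjectsEditor items={items} mode={mode} onStartEdit={() => undefined} onRemove={() => undefined}>
    {(item) => <div>{item?.name}</div>}
  </ArrayOfObjectsEditor>
);

export default TransformationsList;
```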
diff --git a/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.test.tsx b/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.test.tsx
new file mode 100644
index 00000000000000..d803bef725ad40
--- /dev/null
+++ b/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.test.tsx
@@ -0,0 +1,46 @@
+import { render } from "utils/testutils";
+
+import { EditorHeader } from "./EditorHeader";
+
+describe("<EditorHeader />", () => {
+  let container: HTMLElement;
+  describe("edit mode", () => {
+    test("it renders only relevant items for the mode", async () => {
+      const renderResult = await render(
+        <EditorHeader
+          mainTitle={<div data-testid="mainTitle">"This is the main title"</div>}
+          addButtonText={<div data-testid="addButtonText">"button text"</div>}
+          itemsCount={0}
+          onAddItem={() => {
+            return null;
+          }}
+          mode="edit"
+        />
+      );
+      container = renderResult.container;
+      const mainTitle = container.querySelector("div[data-testid='mainTitle']");
+      const addButtonText = container.querySelector("div[data-testid='addButtonText']");
+      expect(mainTitle).toBeInTheDocument();
+      expect(addButtonText).toBeInTheDocument();
+    });
+  });
+  describe("readonly mode", () => {
+    test("it renders only relevant items for the mode", async () => {
+      const renderResult = await render(
+        <EditorHeader
+          mainTitle={<div data-testid="mainTitle">"This is the main title"</div>}
+          addButtonText={<div data-testid="addButtonText">"button text"</div>}
+          itemsCount={0}
+          onAddItem={() => {
+            return null;
+          }}
+          mode="readonly"
+        />
+      );
+      container = renderResult.container;
+      const mainTitle = container.querySelector("div[data-testid='mainTitle']");
+      expect(mainTitle).toBeInTheDocument();
+      expect(container.querySelector("div[data-testid='addButtonText']")).not.toBeInTheDocument();
+    });
+  });
+});
diff --git a/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.tsx b/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.tsx
index 3c6a45b85719c3..06787d4eca2736 100644
--- a/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.tsx
+++ b/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.tsx
@@ -4,6 +4,8 @@ import styled from "styled-components";
 
 import { Button } from "components";
 
+import { ConnectionFormMode } from "views/Connection/ConnectionForm/ConnectionForm";
+
 const Content = styled.div`
   display: flex;
   justify-content: space-between;
@@ -21,15 +23,18 @@ type EditorHeaderProps = {
   addButtonText?: React.ReactNode;
   itemsCount: number;
   onAddItem: () => void;
+  mode?: ConnectionFormMode;
 };
 
-const EditorHeader: React.FC<EditorHeaderProps> = ({ itemsCount, onAddItem, mainTitle, addButtonText }) => {
+const EditorHeader: React.FC<EditorHeaderProps> = ({ itemsCount, onAddItem, mainTitle, addButtonText, mode }) => {
   return (
     <Content>
       {mainTitle || <FormattedMessage id="form.items" values={{ count: itemsCount }} />}
-      <Button onClick={onAddItem}>
-        {addButtonText}
-      </Button>
+      {mode !== "readonly" && (
+        <Button onClick={onAddItem}>
+          {addButtonText}
+        </Button>
+      )}
     </Content>
   );
 };
diff --git a/airbyte-webapp/src/components/CreateConnectionContent/CreateConnectionContent.tsx b/airbyte-webapp/src/components/CreateConnectionContent/CreateConnectionContent.tsx
index 524f95ef67ea7d..917698bb392dad 100644
--- a/airbyte-webapp/src/components/CreateConnectionContent/CreateConnectionContent.tsx
+++ b/airbyte-webapp/src/components/CreateConnectionContent/CreateConnectionContent.tsx
@@ -117,6 +117,7 @@ const CreateConnectionContent: React.FC = ({
         ) : (
           <Suspense fallback={<LoadingPage />}>
            = (props) => (
-  
+  