update destination_specs.yaml (#9007)
subodh1810 committed Dec 21, 2021
1 parent 8654c4a commit ddbce46
Showing 4 changed files with 99 additions and 8 deletions.
98 changes: 95 additions & 3 deletions airbyte-config/init/src/main/resources/seed/destination_specs.yaml
@@ -354,7 +354,7 @@
- "overwrite"
- "append"
- "append_dedup"
- dockerImage: "airbyte/destination-bigquery-denormalized:0.1.11"
- dockerImage: "airbyte/destination-bigquery-denormalized:0.2.1"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery"
connectionSpecification:
@@ -366,6 +366,19 @@
- "dataset_id"
additionalProperties: true
properties:
big_query_client_buffer_size_mb:
title: "Google BigQuery client chunk size"
description: "Google BigQuery client's chunk (buffer) size (MIN = 1, MAX\
\ = 15) for each table. It defaults to 15MiB. Smaller chunk size means\
\ less memory consumption, and is recommended for big data sets. For more\
\ details refer to the documentation <a href=\"https://googleapis.dev/python/bigquery/latest/generated/google.cloud.bigquery.client.Client.html\"\
>here</a>"
type: "integer"
minimum: 1
maximum: 15
default: 15
examples:
- "15"
project_id:
type: "string"
description: "The GCP project ID for the project containing the target BigQuery\
@@ -422,6 +435,85 @@
\ be used if this field is left empty."
title: "Credentials JSON"
airbyte_secret: true
loading_method:
type: "object"
title: "Loading Method"
description: "Select the way that data will be uploaded to BigQuery."
oneOf:
- title: "Standard Inserts"
additionalProperties: false
description: "Direct uploading using streams."
required:
- "method"
properties:
method:
type: "string"
const: "Standard"
- title: "GCS Staging"
additionalProperties: false
description: "Writes large batches of records to a file, uploads the file\
\ to GCS, then uses <pre>COPY INTO table</pre> to upload the file. Recommended\
\ for large production workloads for better speed and scalability."
required:
- "method"
- "gcs_bucket_name"
- "gcs_bucket_path"
- "credential"
properties:
method:
type: "string"
const: "GCS Staging"
gcs_bucket_name:
title: "GCS Bucket Name"
type: "string"
description: "The name of the GCS bucket."
examples:
- "airbyte_sync"
gcs_bucket_path:
description: "Directory under the GCS bucket where data will be written."
type: "string"
examples:
- "data_sync/test"
keep_files_in_gcs-bucket:
type: "string"
description: "This upload method is supposed to temporary store records\
\ in GCS bucket. What do you want to do with data in GCS bucket\
\ when migration has finished?"
title: "GCS tmp files afterward processing"
default: "Delete all tmp files from GCS"
enum:
- "Delete all tmp files from GCS"
- "Keep all tmp files in GCS"
credential:
title: "Credential"
type: "object"
oneOf:
- title: "HMAC key"
required:
- "credential_type"
- "hmac_key_access_id"
- "hmac_key_secret"
properties:
credential_type:
type: "string"
const: "HMAC_KEY"
hmac_key_access_id:
type: "string"
description: "HMAC key access ID. When linked to a service account,\
\ this ID is 61 characters long; when linked to a user account,\
\ it is 24 characters long."
title: "HMAC Key Access ID"
airbyte_secret: true
examples:
- "1234567890abcdefghij1234"
hmac_key_secret:
type: "string"
description: "The corresponding secret for the access ID. It\
\ is a 40-character base-64 encoded string."
title: "HMAC Key Secret"
airbyte_secret: true
examples:
- "1234567890abcdefghij1234567890ABCDEFGHIJ"
supportsIncremental: true
supportsNormalization: false
supportsDBT: true
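
For reference, a destination config exercising the new fields above (big_query_client_buffer_size_mb and the "GCS Staging" loading_method) might look like the following Python sketch. The field names, const/enum values, and example strings are taken from the connectionSpecification in this diff; the project, dataset, bucket, and HMAC values are placeholders, and the schema used for validation is a hand-trimmed subset, not the full spec.

# Hypothetical config for airbyte/destination-bigquery-denormalized using the
# new "GCS Staging" loading method. Only field names and const/enum values are
# taken from the spec above; all concrete values are made up for illustration.
from jsonschema import validate  # pip install jsonschema

# Trimmed-down subset of the "GCS Staging" branch of the loading_method oneOf.
gcs_staging_schema = {
    "type": "object",
    "required": ["method", "gcs_bucket_name", "gcs_bucket_path", "credential"],
    "properties": {
        "method": {"type": "string", "const": "GCS Staging"},
        "gcs_bucket_name": {"type": "string"},
        "gcs_bucket_path": {"type": "string"},
        "credential": {
            "type": "object",
            "required": ["credential_type", "hmac_key_access_id", "hmac_key_secret"],
            "properties": {
                "credential_type": {"type": "string", "const": "HMAC_KEY"},
                "hmac_key_access_id": {"type": "string"},
                "hmac_key_secret": {"type": "string"},
            },
        },
    },
}

config = {
    "project_id": "my-gcp-project",          # placeholder
    "dataset_id": "airbyte_raw",             # placeholder
    "big_query_client_buffer_size_mb": 15,   # integer between 1 and 15, defaults to 15
    "loading_method": {
        "method": "GCS Staging",
        "gcs_bucket_name": "airbyte_sync",   # example value from the spec
        "gcs_bucket_path": "data_sync/test", # example value from the spec
        "keep_files_in_gcs-bucket": "Delete all tmp files from GCS",
        "credential": {
            "credential_type": "HMAC_KEY",
            "hmac_key_access_id": "1234567890abcdefghij1234",              # example from the spec
            "hmac_key_secret": "1234567890abcdefghij1234567890ABCDEFGHIJ", # example from the spec
        },
    },
}

# Validate only the loading_method portion against the trimmed schema.
validate(instance=config["loading_method"], schema=gcs_staging_schema)
print("loading_method config matches the GCS Staging branch of the spec")
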
@@ -1014,7 +1106,7 @@
- "overwrite"
- "append"
supportsNamespaces: true
- dockerImage: "airbyte/destination-gcs:0.1.16"
- dockerImage: "airbyte/destination-gcs:0.1.17"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/destinations/gcs"
connectionSpecification:
@@ -3259,7 +3351,7 @@
supported_destination_sync_modes:
- "append"
- "overwrite"
- dockerImage: "airbyte/destination-s3:0.2.1"
- dockerImage: "airbyte/destination-s3:0.2.2"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/destinations/s3"
connectionSpecification:
@@ -64,7 +64,7 @@ public GcsAvroWriter(final GcsDestinationConfig config,

Schema schema = (airbyteSchema == null ? GcsUtils.getDefaultAvroSchema(stream.getName(), stream.getNamespace(), true)
: new JsonToAvroSchemaConverter().getAvroSchema(airbyteSchema, stream.getName(),
stream.getNamespace(), true, false, false,true));
stream.getNamespace(), true, false, false, true));
LOGGER.info("Avro schema : {}", schema);
final String outputFilename = BaseGcsWriter.getOutputFilename(uploadTimestamp, S3Format.AVRO);
objectKey = String.join("/", outputPrefix, outputFilename);
@@ -234,7 +234,8 @@ Schema getSingleFieldType(final String fieldName,
String.format("Array field %s has invalid items property: %s", fieldName, items));
}
}
case OBJECT -> fieldSchema = getAvroSchema(fieldDefinition, fieldName, jsonNodePathMap.get(fieldDefinition), false, appendExtraProps, addStringToLogicalTypes, false);
case OBJECT -> fieldSchema =
getAvroSchema(fieldDefinition, fieldName, jsonNodePathMap.get(fieldDefinition), false, appendExtraProps, addStringToLogicalTypes, false);
default -> throw new IllegalStateException(
String.format("Unexpected type for field %s: %s", fieldName, fieldType));
}
@@ -10,9 +10,7 @@

def setup_responses():
responses.add(
responses.POST,
"https://sandbox-lever.auth0.com/oauth/token",
json={"access_token": "fake_access_token", "expires_in": 3600}
responses.POST, "https://sandbox-lever.auth0.com/oauth/token", json={"access_token": "fake_access_token", "expires_in": 3600}
)
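
The setup_responses() helper above registers a fake Auth0 token endpoint with the responses library. A minimal sketch of how such a mock is typically consumed in a test follows; the test function and the requests call are illustrative only and are not part of this commit.

# Illustrative usage of the mock registered by setup_responses() above.
# Assumes this lives in the same test module, so setup_responses is in scope.
import requests
import responses


@responses.activate
def test_token_endpoint_is_mocked():
    setup_responses()  # registers the fake token endpoint shown above
    resp = requests.post("https://sandbox-lever.auth0.com/oauth/token")
    assert resp.json()["access_token"] == "fake_access_token"
    assert resp.json()["expires_in"] == 3600
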


